Compare commits
22 Commits
d4ce9c3ee3...0ecb4a0aff
@ -0,0 +1,42 @@
# API Server

`api_server.py` is a Flask script that loads and executes Flask blueprints from the `blueprints` directory.

Currently it is intended to combine oggit and ogrepository.

# Usage

## Ubuntu 24.04

    sudo apt install -y python3-flask python3-paramiko opengnsys-flask-executor opengnsys-flask-restx

The `opengnsys-flask-executor` and `opengnsys-flask-restx` packages are available on the OpenGnsys package server.

Run with:

    ./api_server.py

# Operation

## Requirements

The gitapi is designed to run within an existing OpenGnsys environment. It should be installed on an ogrepository.

## API Examples

### Get list of branches

    $ curl -L http://localhost:5000/repositories/linux/branches
    {
        "branches": [
            "master"
        ]
    }

### Synchronize with remote repository

    curl --header "Content-Type: application/json" --data '{"remote_repository":"foobar"}' -X POST -L http://localhost:5000/repositories/linux/sync
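### Check the status of a synchronization task

The sync endpoint starts an asynchronous job and returns a `task_id`. A sketch of polling it, assuming the task-status endpoint is reachable under the same base path as the examples above and `<task_id>` is the value returned by the sync call:

    $ curl -L http://localhost:5000/tasks/<task_id>/status
    {
        "status": "completed",
        "result": []
    }

A task that is still running returns `{"status": "in progress"}` with HTTP status 202.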
@ -0,0 +1,170 @@
#!/usr/bin/env python3
import os
import sys

sys.path.insert(0, "/usr/share/opengnsys-modules/python3/dist-packages")
sys.path.insert(0, "/opt/opengnsys/oggit/bin/")


import importlib
import logging
import uuid
import argparse
import yaml
from flask import Flask, request
from flask_executor import Executor
from flask_restx import Api
from flasgger import Swagger
from werkzeug.exceptions import HTTPException
from systemd.journal import JournalHandler

class FakeArgs:
    def __init__(self):
        self.verbose = False
        self.listen = None
        self.port = None
        self.debug = None


parser = argparse.ArgumentParser(
    prog="api_server.py",
    description="OpenGnsys Repository API Server",
)

debug_enabled = False
listen_host = '0.0.0.0'
listen_port = 8006

is_gunicorn = "gunicorn" in os.environ.get("SERVER_SOFTWARE", "")

if not is_gunicorn:
    # Gunicorn passes us all the arguments passed to gunicorn itself, which of course crashes here since we don't recognize them.
    # Deal with this by not doing argument handling when running under gunicorn
    parser.add_argument('--debug', action='store_true', help="Enable debug output")
    parser.add_argument('--listen', metavar="HOST", help="Listen address")
    parser.add_argument('--port', metavar="PORT", help="Listen port")
    parser.add_argument("-v", "--verbose", action="store_true", help = "Verbose console output")

    args = parser.parse_args()
else:
    args = FakeArgs()


log = logging.getLogger('api_server')
log.addHandler(JournalHandler())

if args.verbose:
    log.addHandler(logging.StreamHandler(stream=sys.stderr))
    log.setLevel(logging.DEBUG)
else:
    log.setLevel(logging.INFO)


if is_gunicorn:
    log.info("Running under gunicorn, argument handling disabled.")

if args.listen:
    listen_host = args.listen

if args.port:
    listen_port = args.port

if args.debug:
    debug_enabled = True


api_base_dir = os.path.dirname(os.path.realpath(__file__))
blueprints_dir = os.path.join(api_base_dir, 'blueprints')
installer_dir = os.path.join(api_base_dir, '../installer')

sys.path.insert(0, installer_dir)


# Create an instance of the Flask class
app = Flask(__name__)
api = Api(app,
    version='0.50',
    title = "OpenGnsys Git API",
    description = "API for managing disk images stored in Git",
    doc = "/apidocs/")

executor = Executor(app)

log.info("Loading blueprints from %s", blueprints_dir)
sys.path.insert(0, blueprints_dir)

for filename in os.listdir(blueprints_dir):
    if filename.endswith('.py'):

        log.info("Loading %s/%s", blueprints_dir, filename)

        module_name = filename.replace(".py", "")
        swagger_file = os.path.join(blueprints_dir, filename.replace(".py", ".yaml"))

        log.info("Importing %s", module_name)
        importlib.invalidate_caches()
        module = importlib.import_module(module_name)
        log.debug("Returned: %s", module)

        app.register_blueprint(module.blueprint)

        if os.path.exists(swagger_file):
            log.info("Loading Swagger documentation from %s...", swagger_file)

            with open(swagger_file, "r", encoding='utf-8') as file:
                swagger_template = yaml.safe_load(file)

            #print(f"Template: {swagger_template}")
            #swagger = Swagger(app, template=swagger_template)
        else:
            log.warning("Swagger not found for this module, looked in %s", swagger_file)


@app.errorhandler(HTTPException)
def handle_exception(e):
    """Return JSON for HTTP errors.

    We create and log an error UUID for each error, and use journald's additional fields for easier searching.
    """
    # start with the correct headers and status code from the error
    response = e.get_response()

    errid = uuid.uuid4().hex

    response = {
        "errcode": e.code,
        "errname": e.name,
        "description": e.description,
    }

    log.error("Error ID %s: code %i, name %s, description %s", errid, e.code, e.name, e.description, extra = { "error_id" : errid, "errcode" : e.code, "errname" : e.name, "description" : e.description })

    return response, 500

@app.after_request
def after_request(response):
    log.info("Request from %s: %s %s %s %s", request.remote_addr, request.method, request.scheme, request.full_path, response.status,
        extra = {"remote_addr" : request.remote_addr, "method" : request.method, "scheme" : request.scheme, "full_path" : request.full_path, "status" : response.status})

    if debug_enabled:
        log.debug("Response: %s", response.data, extra = {"response" : response.data})

    return response


# Run the Flask app
if __name__ == '__main__':
    print(f"Map: {app.url_map}")
    app.run(debug=debug_enabled, host=listen_host, port=listen_port)
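Note on the blueprint loader above: each `.py` file in the `blueprints` directory must expose a module-level `blueprint` object, which the server registers with `app.register_blueprint(module.blueprint)`, and may ship an optional `.yaml` Swagger file next to it. A minimal sketch of a loadable module (the `example` name and route are hypothetical, not part of this changeset):

    # blueprints/example.py -- minimal module the loader can import
    from flask import Blueprint
    from flask_restx import Api, Resource

    # The loader looks for this exact attribute name.
    blueprint = Blueprint('example_api', __name__, url_prefix='/example/v1')
    api = Api(blueprint)

    @api.route('/ping')
    class Ping(Resource):
        def get(self):
            # Trivial health-style endpoint, for illustration only.
            return {"message": "pong"}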
@ -0,0 +1,713 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
This module provides a Flask-based API for managing Git repositories in the OpenGnsys system.
|
||||
It includes endpoints for creating, deleting, synchronizing, backing up, and performing garbage
|
||||
collection on Git repositories. The API also provides endpoints for retrieving repository
|
||||
information such as the list of repositories and branches, as well as checking the status of
|
||||
asynchronous tasks.
|
||||
|
||||
Classes:
|
||||
None
|
||||
|
||||
Functions:
|
||||
do_repo_backup(repo, params)
|
||||
|
||||
do_repo_sync(repo, params)
|
||||
|
||||
do_repo_gc(repo)
|
||||
|
||||
home()
|
||||
|
||||
get_repositories()
|
||||
|
||||
create_repo(repo)
|
||||
|
||||
sync_repo(repo)
|
||||
|
||||
backup_repository(repo)
|
||||
|
||||
gc_repo(repo)
|
||||
|
||||
tasks_status(task_id)
|
||||
|
||||
delete_repo(repo)
|
||||
|
||||
get_repository_branches(repo)
|
||||
|
||||
health_check()
|
||||
|
||||
Constants:
|
||||
REPOSITORIES_BASE_PATH (str): The base path where Git repositories are stored.
|
||||
|
||||
Global Variables:
|
||||
app (Flask): The Flask application instance.
|
||||
executor (Executor): The Flask-Executor instance for managing asynchronous tasks.
|
||||
tasks (dict): A dictionary to store the status of asynchronous tasks.
|
||||
"""
|
||||
|
||||
# pylint: disable=locally-disabled, line-too-long
|
||||
|
||||
import os.path
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
import time
|
||||
import logging
|
||||
import traceback
|
||||
|
||||
import git
|
||||
from opengnsys_git_installer import OpengnsysGitInstaller
|
||||
from flask import Blueprint, request
|
||||
from flask_restx import Resource, Api
|
||||
import paramiko
|
||||
from systemd.journal import JournalHandler
|
||||
|
||||
|
||||
debug_enabled = False
|
||||
|
||||
log = logging.getLogger('gitapi')
|
||||
log.addHandler(JournalHandler())
|
||||
log.setLevel(logging.INFO)
|
||||
log.info("Started")
|
||||
|
||||
|
||||
REPOSITORIES_BASE_PATH = "/opt/opengnsys/ogrepository/oggit/git/oggit/"
|
||||
|
||||
start_time = time.time()
|
||||
tasks = {}
|
||||
tasks_max = 1024
|
||||
|
||||
blueprint = Blueprint('git_api', __name__, template_folder='templates', url_prefix = '/oggit/v1')
|
||||
api = Api(blueprint)
|
||||
git_ns = api
|
||||
|
||||
|
||||
def add_task(future):
|
||||
task_id = uuid.uuid4().hex
|
||||
task_data = {
|
||||
"future" : future,
|
||||
"start_time" : time.time()
|
||||
}
|
||||
|
||||
while len(tasks) >= tasks_max:
|
||||
oldest_task_id = min(tasks, key=lambda k: tasks[k]['start_time'])
|
||||
task = tasks[oldest_task_id]["future"]
|
||||
if task.running():
|
||||
log.error("Cancelling still running task %s, maximum task limit of %i reached", task_id, tasks_max)
|
||||
task.cancel()
|
||||
|
||||
del tasks[oldest_task_id]
|
||||
|
||||
tasks[task_id] = task_data
|
||||
return task_id
|
||||
|
||||
def do_repo_backup(repo, params):
|
||||
"""
|
||||
Creates a backup of the specified Git repository and uploads it to a remote server via SFTP.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to back up.
|
||||
params (dict): A dictionary containing the following keys:
|
||||
- ssh_server (str): The SSH server address.
|
||||
- ssh_port (int): The SSH server port.
|
||||
- ssh_user (str): The SSH username.
|
||||
- filename (str): The remote filename where the backup will be stored.
|
||||
|
||||
Returns:
|
||||
bool: True if the backup was successful.
|
||||
"""
|
||||
|
||||
git_repo_path = f"{REPOSITORIES_BASE_PATH}/{repo}.git"
|
||||
git_repo = git.Repo(git_repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', git_repo_path)
|
||||
|
||||
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
|
||||
ssh.connect(params["ssh_server"], params["ssh_port"], params["ssh_user"])
|
||||
sftp = ssh.open_sftp()
|
||||
|
||||
|
||||
with sftp.file(params["filename"], mode='wb+') as remote_file:
|
||||
git_repo.archive(remote_file, format="tar.gz")
|
||||
|
||||
|
||||
return True
|
||||
|
||||
def do_repo_sync(repo, params):
|
||||
"""
|
||||
Synchronizes a local Git repository with a remote repository.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the local repository to synchronize.
|
||||
params (dict): A dictionary containing the remote repository URL with the key "remote_repository".
|
||||
|
||||
Returns:
|
||||
list: A list of dictionaries, each containing:
|
||||
- "local_ref" (str): The name of the local reference.
|
||||
- "remote_ref" (str): The name of the remote reference.
|
||||
- "summary" (str): A summary of the push operation for the reference.
|
||||
"""
|
||||
git_repo_path = f"{REPOSITORIES_BASE_PATH}/{repo}.git"
|
||||
git_repo = git.Repo(git_repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', git_repo_path)
|
||||
|
||||
|
||||
# Recreate the remote every time, it might change
|
||||
if "backup" in git_repo.remotes:
|
||||
git_repo.delete_remote("backup")
|
||||
|
||||
backup_repo = git_repo.create_remote("backup", params["remote_repository"])
|
||||
pushed_references = backup_repo.push("*:*")
|
||||
results = []
|
||||
|
||||
# This gets returned to the API
|
||||
for ref in pushed_references:
|
||||
results = results + [ {"local_ref" : ref.local_ref.name, "remote_ref" : ref.remote_ref.name, "summary" : ref.summary }]
|
||||
|
||||
return results
|
||||
|
||||
def do_repo_gc(repo):
|
||||
"""
|
||||
Perform garbage collection on the specified Git repository.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to perform garbage collection on.
|
||||
|
||||
Returns:
|
||||
bool: True if the garbage collection command was executed successfully.
|
||||
"""
|
||||
git_repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
git_repo = git.Repo(git_repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', git_repo_path)
|
||||
|
||||
git_repo.git.gc()

return True
|
||||
|
||||
|
||||
|
||||
# Define a route for the root URL
|
||||
@api.route('/')
|
||||
class GitLib(Resource):
|
||||
|
||||
#@api.doc('home')
|
||||
def get(self):
|
||||
"""
|
||||
Home route that returns a JSON response with a welcome message for the OpenGnsys Git API.
|
||||
|
||||
Returns:
|
||||
Response: A Flask JSON response containing a welcome message.
|
||||
"""
|
||||
log.info("Root URL accessed")
|
||||
|
||||
return {
|
||||
"message": "OpenGnsys Git API"
|
||||
}
|
||||
|
||||
@git_ns.route('/repositories')
|
||||
class GitRepositories(Resource):
|
||||
def get(self):
|
||||
"""
|
||||
Retrieve a list of Git repositories.
|
||||
|
||||
This endpoint scans the OpenGnsys image path for directories that
|
||||
appear to be Git repositories (i.e., they contain a "HEAD" file).
|
||||
It returns a JSON response containing the names of these repositories.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a list of repository names or an
|
||||
error message if the repository storage is not found.
|
||||
- 200 OK: When the repositories are successfully retrieved.
|
||||
- 500 Internal Server Error: When the repository storage is not found.
|
||||
|
||||
Example JSON response:
|
||||
{
|
||||
"repositories": ["repo1", "repo2"]
|
||||
}
|
||||
"""
|
||||
|
||||
if not os.path.isdir(REPOSITORIES_BASE_PATH):
|
||||
log.error("Can't list repositories. Repository storage at %s not found", REPOSITORIES_BASE_PATH, extra = {"path" : REPOSITORIES_BASE_PATH})
|
||||
return {"error": "Repository storage not found, git functionality may not be installed."}, 500
|
||||
|
||||
repos = []
|
||||
for entry in os.scandir(REPOSITORIES_BASE_PATH):
|
||||
if entry.is_dir(follow_symlinks=False) and os.path.isfile(os.path.join(entry.path, "HEAD")):
|
||||
name = entry.name
|
||||
if name.endswith(".git"):
|
||||
name = name[:-4]
|
||||
|
||||
repos = repos + [name]
|
||||
|
||||
log.info("Returning %i repositories", len(repos))
|
||||
return {
|
||||
"repositories": repos
|
||||
}
|
||||
|
||||
def post(self):
|
||||
"""
|
||||
Create a new Git repository.
|
||||
|
||||
This endpoint creates a new Git repository with the specified name.
|
||||
If the repository already exists, it returns a status message indicating so.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to be created.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a status message and HTTP status code.
|
||||
- 200: If the repository already exists.
|
||||
- 201: If the repository is successfully created.
|
||||
"""
|
||||
data = request.json
|
||||
|
||||
if data is None:
|
||||
log.error("Can't create repository, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
repo = data["name"]
|
||||
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if os.path.isdir(repo_path):
|
||||
log.error("Can't create repository %s, already exists at %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path})
|
||||
return {"status": "Repository already exists"}, 200
|
||||
|
||||
|
||||
installer = OpengnsysGitInstaller()
|
||||
installer.add_forgejo_repo(repo)
|
||||
|
||||
#installer.init_git_repo(repo + ".git")
|
||||
|
||||
log.info("Repository %s created", repo, extra = {"repository" : repo})
|
||||
return {"status": "Repository created"}, 201
|
||||
|
||||
|
||||
@git_ns.route('/repositories/<repo>/sync')
|
||||
class GitRepoSync(Resource):
|
||||
def post(self, repo):
|
||||
"""
|
||||
Synchronize a repository with a remote repository.
|
||||
|
||||
This endpoint triggers the synchronization process for a specified repository.
|
||||
It expects a JSON payload with the remote repository details.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to be synchronized.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response indicating the status of the synchronization process.
|
||||
- 200: If the synchronization process has started successfully.
|
||||
- 400: If the request payload is missing or invalid.
|
||||
- 404: If the specified repository is not found.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't sync repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
|
||||
data = request.json
|
||||
|
||||
if data is None:
|
||||
log.error("Can't create repository, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
if not "remote_repository" in data:
|
||||
log.error("Can't create repository, parameter 'remote_repository' missing")
|
||||
return {"error" : "Parameter 'remote_repository' missing"}, 400
|
||||
|
||||
|
||||
future = executor.submit(do_repo_sync, repo, data)
|
||||
task_id = add_task(future)
|
||||
|
||||
log.info("Starting synchronization of repository %s, task %s", repo, task_id, extra = {"repository" : repo, "task_id" : task_id})
|
||||
return {"status": "started", "task_id" : task_id}, 200
|
||||
|
||||
|
||||
|
||||
@git_ns.route('/repositories/<repo>/backup')
|
||||
class GitRepoBackup(Resource):
|
||||
def post(self, repo):
|
||||
"""
|
||||
Backup a specified repository.
|
||||
|
||||
Endpoint: POST /repositories/<repo>/backup
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to back up.
|
||||
|
||||
Request Body (JSON):
|
||||
ssh_port (int, optional): The SSH port to use for the backup. Defaults to 22.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response indicating the status of the backup operation.
|
||||
- If the repository is not found, returns a 404 error with a message.
|
||||
- If the request body is missing, returns a 400 error with a message.
|
||||
- If the backup process starts successfully, returns a 200 status with the task ID.
|
||||
|
||||
Notes:
|
||||
- The repository path is constructed by appending ".git" to the repository name.
|
||||
- The backup operation is performed asynchronously using a thread pool executor.
|
||||
- The task ID of the backup operation is generated using UUID and stored in a global tasks dictionary.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't backup repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
|
||||
data = request.json
|
||||
if data is None:
|
||||
log.error("Can't create repository, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
|
||||
if not "ssh_port" in data:
|
||||
data["ssh_port"] = 22
|
||||
|
||||
|
||||
future = executor.submit(do_repo_backup, repo, data)
|
||||
task_id = add_task(future)
|
||||
|
||||
log.info("Starting backup of repository %s, task %s", repo, task_id, extra = {"repository" : repo, "task_id" : task_id})
|
||||
return {"status": "started", "task_id" : task_id}, 200
|
||||
|
||||
@git_ns.route('/repositories/<repo>/compact', methods=['POST'])
|
||||
class GitRepoCompact(Resource):
|
||||
def post(self, repo):
|
||||
"""
|
||||
Initiates a garbage collection (GC) process for a specified Git repository.
|
||||
|
||||
This endpoint triggers an asynchronous GC task for the given repository.
|
||||
The task is submitted to an executor, and a unique task ID is generated
|
||||
and returned to the client.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to perform GC on.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing the status of the request and
|
||||
a unique task ID if the repository is found, or an error
|
||||
message if the repository is not found.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't compact repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
future = executor.submit(do_repo_gc, repo)
|
||||
task_id = add_task(future)
|
||||
|
||||
log.info("Starting compaction of repository %s, task %s", repo, task_id, extra = {"repository" : repo, "task_id" : task_id})
|
||||
return {"status": "started", "task_id" : task_id}, 200
|
||||
|
||||
|
||||
@git_ns.route('/tasks/<task_id>/status')
|
||||
class GitTaskStatus(Resource):
|
||||
def get(self, task_id):
|
||||
"""
|
||||
Endpoint to check the status of a specific task.
|
||||
|
||||
Args:
|
||||
task_id (str): The unique identifier of the task.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing the status of the task.
|
||||
- If the task is not found, returns a 404 error with an error message.
|
||||
- If the task is completed, returns a 200 status with the result.
|
||||
- If the task is still in progress, returns a 202 status indicating the task is in progress.
|
||||
"""
|
||||
if not task_id in tasks:
|
||||
log.error("Task %s was not found", task_id, extra = {"task_id" : task_id})
|
||||
return {"error": "Task not found"}, 404
|
||||
|
||||
future = tasks[task_id]["future"]
|
||||
|
||||
try:
|
||||
if future.done():
|
||||
result = future.result()
|
||||
log.info("Returning completion of task %s", task_id, extra = {"task_id" : task_id})
|
||||
return {"status" : "completed", "result" : result}, 200
|
||||
else:
|
||||
log.info("Task %s is still in progress", task_id, extra = {"task_id" : task_id})
|
||||
return {"status" : "in progress"}, 202
|
||||
except Exception as e:
|
||||
errid = uuid.uuid4().hex
|
||||
|
||||
|
||||
log.error("Task %s failed with exception %s, UUID %s", task_id, traceback.format_exception(e), errid, extra = {"task_id" : task_id, "exception" : traceback.format_exception(e), "error_id" : errid})
|
||||
return {"status" : "internal error", "error_id" : errid }, 500
|
||||
|
||||
|
||||
@git_ns.route('/repositories/<repo>', methods=['DELETE'])
|
||||
class GitRepo(Resource):
|
||||
def delete(self, repo):
|
||||
"""
|
||||
Deletes a Git repository.
|
||||
|
||||
This endpoint deletes a Git repository specified by the `repo` parameter.
|
||||
If the repository does not exist, it returns a 404 error with a message
|
||||
indicating that the repository was not found. If the repository is successfully
|
||||
deleted, it returns a 200 status with a message indicating that the repository
|
||||
was deleted.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to delete.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a status message and the appropriate HTTP status code.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't delete repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
|
||||
shutil.rmtree(repo_path)
|
||||
log.info("Deleted repository %s", repo, extra = {"repository" : repo})
|
||||
return {"status": "Repository deleted"}, 200
|
||||
|
||||
|
||||
|
||||
|
||||
@git_ns.route('/repositories/<repo>/branches')
|
||||
class GitRepoBranches(Resource):
|
||||
def get(self, repo):
|
||||
"""
|
||||
Retrieve the list of branches for a given repository.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a list of branch names or an error message if the repository is not found.
|
||||
- 200: A JSON object with a "branches" key containing a list of branch names.
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found" if the repository does not exist.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get branches of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
|
||||
branches = []
|
||||
for branch in git_repo.branches:
|
||||
branches = branches + [branch.name]
|
||||
|
||||
log.info("Returning %i branches", len(branches))
|
||||
return {
|
||||
"branches": branches
|
||||
}
|
||||
|
||||
@git_ns.route('/repositories/<repo>/branches/<branch>')
|
||||
class GitRepoBranchesDeleter(Resource):
|
||||
def delete(self, repo, branch):
|
||||
"""Delete a given branch in a given repository
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a list of branch names or an error message if the repository is not found.
|
||||
- 200: A JSON object with a "status" key containing "deleted"
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found" or "Branch not found"
|
||||
"""
|
||||
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get branches of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
|
||||
if not branch in git_repo.branches:
|
||||
log.error("Can't delete branch %s, not found in repository %s", branch, repo, extra = {"repository" : repo, "branch" : branch})
|
||||
return {"error": "Branch not found"}, 404
|
||||
|
||||
git_repo.delete_head(branch)
|
||||
log.info("Branch %s of repository %s deleted", branch, repo, extra = {"repository" : repo, "branch" : branch})
|
||||
return {"status": "deleted"}, 200
|
||||
|
||||
def post(self, repo, branch):
|
||||
"""Create a given branch in a given repository
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a creation status or an error message if the repository is not found.
- 200: A JSON object with a "status" key containing "created"
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found" or "Branch not found"
|
||||
- 409: A JSON object with an "error" key containing the message "Branch already exists"
|
||||
"""
|
||||
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get branches of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
data = request.json
|
||||
if data is None:
|
||||
log.error("Can't create branch, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
if not "commit" in data:
|
||||
log.error("Can't create branch, commit parameter missing")
|
||||
return {"error" : "commit parameter missing"}, 400
|
||||
|
||||
|
||||
if branch in git_repo.branches:
|
||||
log.error("Can't create branch %s, already found in repository %s", branch, repo, extra = {"repository" : repo, "branch" : branch})
|
||||
return {"error": "Branch already exists"}, 409
|
||||
|
||||
git_repo.create_head(branch, commit = data["commit"] )
|
||||
log.info("Branch %s of repository %s created", branch, repo, extra = {"repository" : repo, "branch" : branch})
|
||||
return {"status": "created"}, 200
|
||||
|
||||
@git_ns.route('/repositories/<repo>/tags')
|
||||
class GitRepoTags(Resource):
|
||||
def get(self, repo):
|
||||
"""
|
||||
Retrieve the list of tags for a given repository.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a list of tags names or an error message if the repository is not found.
|
||||
- 200: A JSON object with a "tags" key containing a list of tags names.
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found" if the repository does not exist.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get tags of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
|
||||
tags = []
|
||||
for tag in git_repo.tags:
|
||||
tags = tags + [tag.name]
|
||||
|
||||
log.info("Returning %i tags", len(tags))
|
||||
return {
|
||||
"tags": tags
|
||||
}
|
||||
|
||||
@git_ns.route('/repositories/<repo>/tags/<tag>')
|
||||
class GitRepoTagsDeleter(Resource):
|
||||
def delete(self, repo, tag):
|
||||
"""Delete a given tag in a given repository
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a list of tag names or an error message if the repository is not found.
|
||||
- 200: A JSON object with a "status" key containing "deleted"
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found" or "Tag not found"
|
||||
"""
|
||||
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get tags of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
|
||||
if not tag in git_repo.tags:
|
||||
log.error("Can't delete tag %s, not found in repository %s", tag, repo, extra = {"repository" : repo, "tag" : tag})
|
||||
return {"error": "Tag not found"}, 404
|
||||
|
||||
git_repo.delete_tag(tag)
|
||||
log.info("Tag %s of repository %s deleted", tag, repo, extra = {"repository" : repo, "tag" : tag})
|
||||
return {"status": "deleted"}, 200
|
||||
|
||||
def post(self, repo, tag):
|
||||
"""Create a given tag in a given repository
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a creation status
|
||||
- 200: A JSON object with a "status" key containing "created"
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found"
|
||||
- 409: A JSON object with an "error" key containing the message "Tag already exists"
|
||||
"""
|
||||
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get tags of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
data = request.json
|
||||
if data is None:
|
||||
log.error("Can't create tag, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
if not "commit" in data:
|
||||
log.error("Can't create tag, commit parameter missing")
|
||||
return {"error" : "commit parameter missing"}, 400
|
||||
|
||||
|
||||
if tag in git_repo.tags:
|
||||
log.error("Can't create tag %s, already found in repository %s", tag, repo, extra = {"repository" : repo, "tag" : tag})
|
||||
return {"error": "Tag already exists"}, 409
|
||||
git_repo.create_tag(tag, ref = data["commit"])
|
||||
|
||||
log.info("Tag %s of repository %s created", tag, repo, extra = {"repository" : repo, "tag" : tag})
|
||||
return {"status": "created"}, 200
|
||||
|
||||
@git_ns.route('/health')
|
||||
class GitHealth(Resource):
|
||||
def get(self):
|
||||
"""
|
||||
Health check endpoint.
|
||||
|
||||
This endpoint returns a JSON response indicating the health status of the application.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a status key set to "OK". Currently it always returns
|
||||
a successful value, but this endpoint can still be used to check that the API is
|
||||
active and functional.
|
||||
|
||||
"""
|
||||
log.info("Health check endpoint called")
|
||||
return {
|
||||
"status": "OK"
|
||||
}
|
||||
|
||||
@git_ns.route('/status')
|
||||
class GitStatus(Resource):
|
||||
def get(self):
|
||||
"""
|
||||
Status check endpoint.
|
||||
|
||||
This endpoint returns a JSON response indicating the status of the application.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with status information
|
||||
|
||||
"""
|
||||
log.info("Status endpoint called")
|
||||
|
||||
return {
|
||||
"uptime" : time.time() - start_time,
|
||||
"active_tasks" : len(tasks)
|
||||
}
|
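Taken together, the endpoints above form an asynchronous workflow: a POST to sync, backup or compact returns a `task_id`, and the client polls `/tasks/<task_id>/status` until the job finishes. A minimal client sketch, assuming the blueprint is mounted under its `/oggit/v1` prefix on port 8006 (both values appear elsewhere in this changeset); the remote repository URL is a placeholder:

    import time
    import requests

    BASE = "http://localhost:8006/oggit/v1"

    # Start an asynchronous synchronization of the "linux" repository.
    resp = requests.post(f"{BASE}/repositories/linux/sync",
                         json={"remote_repository": "ssh://git@example.com/linux.git"})
    resp.raise_for_status()
    task_id = resp.json()["task_id"]

    # Poll the task until it is no longer "in progress" (HTTP 202).
    while True:
        status = requests.get(f"{BASE}/tasks/{task_id}/status")
        if status.status_code != 202:
            break
        time.sleep(5)

    print(status.json())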
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,5 @@
oggit (0.5) UNRELEASED; urgency=medium

  * Initial release.

 -- OpenGnsys <opengnsys@opengnsys.es>  Fri, 14 Mar 2025 08:40:35 +0100
@ -0,0 +1,38 @@
Source: oggit
Section: unknown
Priority: optional
Maintainer: OpenGnsys <opengnsys@opengnsys.es>
Rules-Requires-Root: no
Build-Depends:
 debhelper-compat (= 13),
Standards-Version: 4.6.2
Homepage: https://opengnsys.es
#Vcs-Browser: https://salsa.debian.org/debian/ogboot
#Vcs-Git: https://salsa.debian.org/debian/ogboot.git

Package: oggit
Architecture: any
Multi-Arch: foreign
Depends:
 ${shlibs:Depends},
 ${misc:Depends},
 bsdextrautils,
 debconf (>= 1.5.0),
 gunicorn,
 opengnsys-flask-executor,
 opengnsys-flask-restx,
 opengnsys-libarchive-c,
 python3,
 python3-aniso8601,
 python3-flasgger,
 python3-flask,
 python3-git,
 python3-paramiko,
 python3-requests,
 python3-termcolor,
 python3-tqdm,
 opengnsys-forgejo (>= 0.5)
Conflicts:
Description: Opengnsys Oggit package
 Files for OpenGnsys Git support
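With the packaging files above in place, the binary package can be built the usual way, for example (assuming the listed build dependencies are installed):

    dpkg-buildpackage -us -uc -b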
@ -0,0 +1,43 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Source: <url://example.com>
Upstream-Name: ogboot
Upstream-Contact: <preferred name and address to reach the upstream project>

Files:
 *
Copyright:
 <years> <put author's name and email here>
 <years> <likewise for another author>
License: GPL-3.0+

Files:
 debian/*
Copyright:
 2025 vagrant <vagrant@build>
License: GPL-3.0+

License: GPL-3.0+
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation, either version 3 of the License, or
 (at your option) any later version.
 .
 This package is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 GNU General Public License for more details.
 .
 You should have received a copy of the GNU General Public License
 along with this program. If not, see <https://www.gnu.org/licenses/>.
Comment:
 On Debian systems, the complete text of the GNU General
 Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".

# Please also look if there are files or directories which have a
# different copyright/license attached and list them here.
# Please avoid picking licenses with terms that are more restrictive than the
# packaged work, as it may make Debian's contributions unacceptable upstream.
#
# If you need, there are some extra license texts available in two places:
#  /usr/share/debhelper/dh_make/licenses/
#  /usr/share/common-licenses/
@ -0,0 +1,3 @@
README.source
README.Debian
README
@ -0,0 +1,3 @@
/opt/opengnsys/images/git
/opt/opengnsys/ogrepository/oggit
/opt/opengnsys/ogrepository/oggit/api
@ -0,0 +1,6 @@
api_server.py /opt/opengnsys/ogrepository/oggit/api
../installer/opengnsys_git_installer.py /opt/opengnsys/oggit/bin
blueprints/gitapi.py /opt/opengnsys/ogrepository/oggit/api/blueprints
blueprints/repo_api.py /opt/opengnsys/ogrepository/oggit/api/blueprints
opengnsys-repository-api.service /etc/systemd/system
@ -0,0 +1,3 @@
#!/bin/bash
/usr/bin/systemctl daemon-reload
/usr/bin/systemctl enable --now opengnsys-repository-api
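The unit file installed above (`opengnsys-repository-api.service`) is not shown in this compare view. Since the package depends on `gunicorn` and `api_server.py` detects it through `SERVER_SOFTWARE`, a minimal sketch of what such a unit might look like follows; the paths and options are assumptions, not taken from this changeset (the port mirrors the script's default of 8006):

    [Unit]
    Description=OpenGnsys Repository API Server
    After=network.target

    [Service]
    # Assumed invocation: serve the Flask app in api_server.py with gunicorn.
    WorkingDirectory=/opt/opengnsys/ogrepository/oggit/api
    ExecStart=/usr/bin/gunicorn --bind 0.0.0.0:8006 api_server:app
    Restart=on-failure

    [Install]
    WantedBy=multi-user.target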
@ -0,0 +1,2 @@
misc:Depends=
misc:Pre-Depends=
@ -0,0 +1,12 @@
Package: oggit
Version: 0.5
Architecture: amd64
Maintainer: OpenGnsys <opengnsys@opengnsys.es>
Installed-Size: 193
Depends: bsdextrautils, debconf (>= 1.5.0), gunicorn, opengnsys-flask-executor, opengnsys-flask-restx, opengnsys-libarchive-c, python3, python3-aniso8601, python3-flasgger, python3-flask, python3-git, python3-paramiko, python3-requests, python3-termcolor, python3-tqdm
Section: unknown
Priority: optional
Multi-Arch: foreign
Homepage: https://opengnsys.es
Description: Opengnsys Oggit package
 Files for OpenGnsys Git support
@ -0,0 +1,6 @@
1a0024adb1d5e54ecff27759c5ac4a7d  opt/opengnsys/oggit/bin/api_server.py
bd0a968737c2d62ce44490414426ccbb  opt/opengnsys/oggit/bin/opengnsys_git_installer.py
af5f26474949def90af8794458f3f08d  opt/opengnsys/oggit/blueprints/gitapi.py
61618848e4caca8b22e3cc7b9c8706b8  opt/opengnsys/oggit/blueprints/repo_api.py
48b531f72dec218fcdd61dce26f6b5ab  usr/share/doc/oggit/changelog.gz
8a13e4a3eb6149d56094319bbed84d0c  usr/share/doc/oggit/copyright
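On an installed system, dpkg keeps a copy of this list, so the installed files can be verified with the standard tool, for example:

    cd / && md5sum -c /var/lib/dpkg/info/oggit.md5sums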
@ -0,0 +1,139 @@
|
|||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, "/usr/share/opengnsys-modules/python3/dist-packages")
|
||||
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
import uuid
|
||||
import argparse
|
||||
from flask import Flask, request
|
||||
from flask_executor import Executor
|
||||
from flask_restx import Api
|
||||
from werkzeug.exceptions import HTTPException
|
||||
from systemd.journal import JournalHandler
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="api_server.py",
|
||||
description="OpenGnsys Repository API Server",
|
||||
)
|
||||
|
||||
debug_enabled = False
|
||||
listen_host = '0.0.0.0'
|
||||
parser.add_argument('--debug', action='store_true', help="Enable debug output")
|
||||
parser.add_argument('--listen', metavar="HOST", help="Listen address")
|
||||
parser.add_argument("-v", "--verbose", action="store_true", help = "Verbose console output")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
|
||||
log = logging.getLogger('api_server')
|
||||
log.addHandler(JournalHandler())
|
||||
|
||||
if args.verbose:
|
||||
log.addHandler(logging.StreamHandler(stream=sys.stderr))
|
||||
log.setLevel(logging.DEBUG)
|
||||
else:
|
||||
log.setLevel(logging.INFO)
|
||||
|
||||
if args.listen:
|
||||
listen_host = args.listen
|
||||
|
||||
|
||||
if args.debug:
|
||||
debug_enabled = True
|
||||
|
||||
|
||||
api_base_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
blueprints_dir = os.path.join(api_base_dir, 'blueprints')
|
||||
installer_dir = os.path.join(api_base_dir, '../installer')
|
||||
|
||||
|
||||
|
||||
|
||||
sys.path.insert(0, installer_dir)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Create an instance of the Flask class
|
||||
app = Flask(__name__)
|
||||
api = Api(app,
|
||||
version='0.50',
|
||||
title = "OpenGnsys Git API",
|
||||
description = "API for managing disk images stored in Git",
|
||||
doc = "/swagger/")
|
||||
|
||||
|
||||
executor = Executor(app)
|
||||
|
||||
log.info("Loading blueprints from %s", blueprints_dir)
|
||||
sys.path.insert(0, blueprints_dir)
|
||||
|
||||
for filename in os.listdir(blueprints_dir):
|
||||
if filename.endswith('.py'):
|
||||
|
||||
log.info("Loading %s/%s", blueprints_dir, filename)
|
||||
|
||||
module_name = filename.replace(".py", "")
|
||||
|
||||
log.info("Importing %s", module_name)
|
||||
importlib.invalidate_caches()
|
||||
module = importlib.import_module(module_name)
|
||||
log.debug("Returned: %s", module)
|
||||
|
||||
app.register_blueprint(module.blueprint)
|
||||
|
||||
|
||||
@app.errorhandler(HTTPException)
|
||||
def handle_exception(e):
|
||||
"""Return JSON for HTTP errors.
|
||||
|
||||
We create and log an error UUID for each error, and use journald's additional fields for easier searching.
|
||||
"""
|
||||
# start with the correct headers and status code from the error
|
||||
response = e.get_response()
|
||||
|
||||
errid = uuid.uuid4().hex
|
||||
|
||||
|
||||
if debug_enabled:
|
||||
response = {
|
||||
"errcode": e.code,
|
||||
"errname": e.name,
|
||||
"description": e.description,
|
||||
}
|
||||
else:
|
||||
response = {
|
||||
"errcode" : 500,
|
||||
"errname" : "Internal error",
|
||||
"description": f"Please see the log for error {errid}",
|
||||
"error_id" : errid
|
||||
}
|
||||
|
||||
log.error("Error ID %s: code %i, name %s, description %s", errid, e.code, e.name, e.description, extra = { "error_id" : errid, "errcode" : e.code, "errname" : e.name, "description" : e.description })
|
||||
|
||||
return response, 500
|
||||
|
||||
@app.after_request
|
||||
def after_request(response):
|
||||
log.info("Request from %s: %s %s %s %s", request.remote_addr, request.method, request.scheme, request.full_path, response.status,
|
||||
extra = {"remote_addr" : request.remote_addr, "method" : request.method, "scheme" : request.scheme, "full_path" : request.full_path, "status" : response.status})
|
||||
|
||||
if debug_enabled:
|
||||
log.debug("Response: %s", response.data, extra = {"response" : response.data})
|
||||
|
||||
return response
|
||||
|
||||
|
||||
|
||||
|
||||
# Run the Flask app
|
||||
if __name__ == '__main__':
|
||||
print(f"Map: {app.url_map}")
|
||||
app.run(debug=debug_enabled, host=listen_host)
|
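With the error handler in this variant, a client that hits an HTTP error while debug is disabled receives only a generic JSON body referencing the logged error ID. An illustrative (not captured) response, with the ID truncated:

    {
        "errcode": 500,
        "errname": "Internal error",
        "description": "Please see the log for error 3f2a...",
        "error_id": "3f2a..."
    }

The code, name and description of the original exception are only returned when `--debug` is enabled; otherwise they stay in the journal, searchable by the `error_id` field.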
File diff suppressed because it is too large
@ -0,0 +1,573 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
This module provides a Flask-based API for managing Git repositories in the OpenGnsys system.
|
||||
It includes endpoints for creating, deleting, synchronizing, backing up, and performing garbage
|
||||
collection on Git repositories. The API also provides endpoints for retrieving repository
|
||||
information such as the list of repositories and branches, as well as checking the status of
|
||||
asynchronous tasks.
|
||||
|
||||
Classes:
|
||||
None
|
||||
|
||||
Functions:
|
||||
do_repo_backup(repo, params)
|
||||
|
||||
do_repo_sync(repo, params)
|
||||
|
||||
do_repo_gc(repo)
|
||||
|
||||
home()
|
||||
|
||||
get_repositories()
|
||||
|
||||
create_repo(repo)
|
||||
|
||||
sync_repo(repo)
|
||||
|
||||
backup_repository(repo)
|
||||
|
||||
gc_repo(repo)
|
||||
|
||||
tasks_status(task_id)
|
||||
|
||||
delete_repo(repo)
|
||||
|
||||
get_repository_branches(repo)
|
||||
|
||||
health_check()
|
||||
|
||||
Constants:
|
||||
REPOSITORIES_BASE_PATH (str): The base path where Git repositories are stored.
|
||||
|
||||
Global Variables:
|
||||
app (Flask): The Flask application instance.
|
||||
executor (Executor): The Flask-Executor instance for managing asynchronous tasks.
|
||||
tasks (dict): A dictionary to store the status of asynchronous tasks.
|
||||
"""
|
||||
|
||||
# pylint: disable=locally-disabled, line-too-long
|
||||
|
||||
import os.path
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
import time
|
||||
import logging
|
||||
import traceback
|
||||
|
||||
import git
|
||||
from opengnsys_git_installer import OpengnsysGitInstaller
|
||||
from flask import Blueprint, request
|
||||
from flask_restx import Resource, Api
|
||||
import paramiko
|
||||
from systemd.journal import JournalHandler
|
||||
|
||||
|
||||
debug_enabled = False
|
||||
|
||||
log = logging.getLogger('gitapi')
|
||||
log.addHandler(JournalHandler())
|
||||
log.setLevel(logging.INFO)
|
||||
log.info("Started")
|
||||
|
||||
|
||||
REPOSITORIES_BASE_PATH = "/opt/opengnsys/ogrepository/oggit/git/oggit/"
|
||||
|
||||
start_time = time.time()
|
||||
tasks = {}
|
||||
tasks_max = 1024
|
||||
|
||||
blueprint = Blueprint('git_api', __name__, template_folder='templates', url_prefix = '/oggit/v1')
|
||||
api = Api(blueprint)
|
||||
git_ns = api
|
||||
|
||||
|
||||
def add_task(future):
|
||||
task_id = uuid.uuid4().hex
|
||||
task_data = {
|
||||
"future" : future,
|
||||
"start_time" : time.time()
|
||||
}
|
||||
|
||||
while len(tasks) >= tasks_max:
|
||||
oldest_task_id = min(tasks, key=lambda k: tasks[k]['start_time'])
|
||||
task = tasks[oldest_task_id]["future"]
|
||||
if task.running():
|
||||
log.error("Cancelling still running task %s, maximum task limit of %i reached", task_id, tasks_max)
|
||||
task.cancel()
|
||||
|
||||
del tasks[oldest_task_id]
|
||||
|
||||
tasks[task_id] = task_data
|
||||
return task_id
|
||||
|
||||
def do_repo_backup(repo, params):
|
||||
"""
|
||||
Creates a backup of the specified Git repository and uploads it to a remote server via SFTP.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to back up.
|
||||
params (dict): A dictionary containing the following keys:
|
||||
- ssh_server (str): The SSH server address.
|
||||
- ssh_port (int): The SSH server port.
|
||||
- ssh_user (str): The SSH username.
|
||||
- filename (str): The remote filename where the backup will be stored.
|
||||
|
||||
Returns:
|
||||
bool: True if the backup was successful.
|
||||
"""
|
||||
|
||||
git_repo_path = f"{REPOSITORIES_BASE_PATH}/{repo}.git"
|
||||
git_repo = git.Repo(git_repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', git_repo_path)
|
||||
|
||||
|
||||
ssh = paramiko.SSHClient()
|
||||
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
|
||||
ssh.connect(params["ssh_server"], params["ssh_port"], params["ssh_user"])
|
||||
sftp = ssh.open_sftp()
|
||||
|
||||
|
||||
with sftp.file(params["filename"], mode='wb+') as remote_file:
|
||||
git_repo.archive(remote_file, format="tar.gz")
|
||||
|
||||
|
||||
return True
|
||||
|
||||
def do_repo_sync(repo, params):
|
||||
"""
|
||||
Synchronizes a local Git repository with a remote repository.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the local repository to synchronize.
|
||||
params (dict): A dictionary containing the remote repository URL with the key "remote_repository".
|
||||
|
||||
Returns:
|
||||
list: A list of dictionaries, each containing:
|
||||
- "local_ref" (str): The name of the local reference.
|
||||
- "remote_ref" (str): The name of the remote reference.
|
||||
- "summary" (str): A summary of the push operation for the reference.
|
||||
"""
|
||||
git_repo_path = f"{REPOSITORIES_BASE_PATH}/{repo}.git"
|
||||
git_repo = git.Repo(git_repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', git_repo_path)
|
||||
|
||||
|
||||
# Recreate the remote every time, it might change
|
||||
if "backup" in git_repo.remotes:
|
||||
git_repo.delete_remote("backup")
|
||||
|
||||
backup_repo = git_repo.create_remote("backup", params["remote_repository"])
|
||||
pushed_references = backup_repo.push("*:*")
|
||||
results = []
|
||||
|
||||
# This gets returned to the API
|
||||
for ref in pushed_references:
|
||||
results = results + [ {"local_ref" : ref.local_ref.name, "remote_ref" : ref.remote_ref.name, "summary" : ref.summary }]
|
||||
|
||||
return results
|
||||
|
||||
def do_repo_gc(repo):
|
||||
"""
|
||||
Perform garbage collection on the specified Git repository.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to perform garbage collection on.
|
||||
|
||||
Returns:
|
||||
bool: True if the garbage collection command was executed successfully.
|
||||
"""
|
||||
git_repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
git_repo = git.Repo(git_repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', git_repo_path)
|
||||
|
||||
git_repo.git.gc()
|
||||
|
||||
|
||||
|
||||
# Define a route for the root URL
|
||||
@api.route('/')
|
||||
class GitLib(Resource):
|
||||
|
||||
#@api.doc('home')
|
||||
def get(self):
|
||||
"""
|
||||
Home route that returns a JSON response with a welcome message for the OpenGnsys Git API.
|
||||
|
||||
Returns:
|
||||
Response: A Flask JSON response containing a welcome message.
|
||||
"""
|
||||
log.info("Root URL accessed")
|
||||
|
||||
return {
|
||||
"message": "OpenGnsys Git API"
|
||||
}
|
||||
|
||||
@git_ns.route('/repositories')
|
||||
class GitRepositories(Resource):
|
||||
def get(self):
|
||||
"""
|
||||
Retrieve a list of Git repositories.
|
||||
|
||||
This endpoint scans the OpenGnsys image path for directories that
|
||||
appear to be Git repositories (i.e., they contain a "HEAD" file).
|
||||
It returns a JSON response containing the names of these repositories.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a list of repository names or an
|
||||
error message if the repository storage is not found.
|
||||
- 200 OK: When the repositories are successfully retrieved.
|
||||
- 500 Internal Server Error: When the repository storage is not found.
|
||||
|
||||
Example JSON response:
|
||||
{
|
||||
"repositories": ["repo1", "repo2"]
|
||||
}
|
||||
"""
|
||||
|
||||
if not os.path.isdir(REPOSITORIES_BASE_PATH):
|
||||
log.error("Can't list repositories. Repository storage at %s not found", REPOSITORIES_BASE_PATH, extra = {"path" : REPOSITORIES_BASE_PATH})
|
||||
return {"error": "Repository storage not found, git functionality may not be installed."}, 500
|
||||
|
||||
repos = []
|
||||
for entry in os.scandir(REPOSITORIES_BASE_PATH):
|
||||
if entry.is_dir(follow_symlinks=False) and os.path.isfile(os.path.join(entry.path, "HEAD")):
|
||||
name = entry.name
|
||||
if name.endswith(".git"):
|
||||
name = name[:-4]
|
||||
|
||||
repos = repos + [name]
|
||||
|
||||
log.info("Returning %i repositories", len(repos))
|
||||
return {
|
||||
"repositories": repos
|
||||
}
|
||||
|
||||
def post(self):
|
||||
"""
|
||||
Create a new Git repository.
|
||||
|
||||
This endpoint creates a new Git repository with the specified name.
|
||||
If the repository already exists, it returns a status message indicating so.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to be created.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a status message and HTTP status code.
|
||||
- 200: If the repository already exists.
|
||||
- 201: If the repository is successfully created.
|
||||
"""
|
||||
data = request.json
|
||||
|
||||
if data is None:
|
||||
log.error("Can't create repository, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
repo = data["name"]
|
||||
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if os.path.isdir(repo_path):
|
||||
log.error("Can't create repository %s, already exists at %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path})
|
||||
return {"status": "Repository already exists"}, 200
|
||||
|
||||
|
||||
installer = OpengnsysGitInstaller()
|
||||
installer.add_forgejo_repo(repo)
|
||||
|
||||
#installer.init_git_repo(repo + ".git")
|
||||
|
||||
log.info("Repository %s created", repo, extra = {"repository" : repo})
|
||||
return {"status": "Repository created"}, 201
|
||||
|
||||
|
||||
@git_ns.route('/repositories/<repo>/sync')
|
||||
class GitRepoSync(Resource):
|
||||
def post(self, repo):
|
||||
"""
|
||||
Synchronize a repository with a remote repository.
|
||||
|
||||
This endpoint triggers the synchronization process for a specified repository.
|
||||
It expects a JSON payload with the remote repository details.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to be synchronized.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response indicating the status of the synchronization process.
|
||||
- 200: If the synchronization process has started successfully.
|
||||
- 400: If the request payload is missing or invalid.
|
||||
- 404: If the specified repository is not found.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't sync repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
|
||||
data = request.json
|
||||
|
||||
if data is None:
|
||||
log.error("Can't create repository, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
if not "remote_repository" in data:
|
||||
log.error("Can't create repository, parameter 'remote_repository' missing")
|
||||
return {"error" : "Parameter 'remote_repository' missing"}, 400
|
||||
|
||||
|
||||
future = executor.submit(do_repo_sync, repo, data)
|
||||
task_id = add_task(future)
|
||||
|
||||
log.info("Starting synchronization of repository %s, task %s", repo, task_id, extra = {"repository" : repo, "task_id" : task_id})
|
||||
return {"status": "started", "task_id" : task_id}, 200
|
||||
|
||||
|
||||
|
||||
@git_ns.route('/repositories/<repo>/backup')
|
||||
class GitRepoBackup(Resource):
|
||||
def post(self, repo):
|
||||
"""
|
||||
Backup a specified repository.
|
||||
|
||||
Endpoint: POST /repositories/<repo>/backup
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to back up.
|
||||
|
||||
Request Body (JSON):
|
||||
ssh_port (int, optional): The SSH port to use for the backup. Defaults to 22.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response indicating the status of the backup operation.
|
||||
- If the repository is not found, returns a 404 error with a message.
|
||||
- If the request body is missing, returns a 400 error with a message.
|
||||
- If the backup process starts successfully, returns a 200 status with the task ID.
|
||||
|
||||
Notes:
|
||||
- The repository path is constructed by appending ".git" to the repository name.
|
||||
- The backup operation is performed asynchronously using a thread pool executor.
|
||||
- The task ID of the backup operation is generated using UUID and stored in a global tasks dictionary.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't backup repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
|
||||
data = request.json
|
||||
if data is None:
|
||||
log.error("Can't create repository, JSON post data missing")
|
||||
return {"error" : "Parameters missing"}, 400
|
||||
|
||||
|
||||
if not "ssh_port" in data:
|
||||
data["ssh_port"] = 22
|
||||
|
||||
|
||||
future = executor.submit(do_repo_backup, repo, data)
|
||||
task_id = add_task(future)
|
||||
|
||||
log.info("Starting backup of repository %s, task %s", repo, task_id, extra = {"repository" : repo, "task_id" : task_id})
|
||||
return {"status": "started", "task_id" : task_id}, 200
|
||||
|
||||
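For illustration only, a backup request could look like this; the host, the port 8006 taken from the bundled gunicorn unit, and the absence of a URL prefix for git_ns are all assumptions:

    curl --header "Content-Type: application/json" --data '{"ssh_port": 22}' \
         -X POST -L http://localhost:8006/repositories/windows/backup
    # {"status": "started", "task_id": "..."}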
@git_ns.route('/repositories/<repo>/compact', methods=['POST'])
|
||||
class GitRepoCompact(Resource):
|
||||
def post(self, repo):
|
||||
"""
|
||||
Initiates a garbage collection (GC) process for a specified Git repository.
|
||||
|
||||
This endpoint triggers an asynchronous GC task for the given repository.
|
||||
The task is submitted to an executor, and a unique task ID is generated
|
||||
and returned to the client.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to perform GC on.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing the status of the request and
|
||||
a unique task ID if the repository is found, or an error
|
||||
message if the repository is not found.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't compact repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
future = executor.submit(do_repo_gc, repo)
|
||||
task_id = add_task(future)
|
||||
|
||||
log.info("Starting compaction of repository %s, task %s", repo, task_id, extra = {"repository" : repo, "task_id" : task_id})
|
||||
return {"status": "started", "task_id" : task_id}, 200
|
||||
|
||||
|
||||
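A hypothetical compaction call, under the same host and port assumptions as the backup example above:

    curl -X POST -L http://localhost:8006/repositories/windows/compact
    # {"status": "started", "task_id": "..."}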
@git_ns.route('/tasks/<task_id>/status')
|
||||
class GitTaskStatus(Resource):
|
||||
def get(self, task_id):
|
||||
"""
|
||||
Endpoint to check the status of a specific task.
|
||||
|
||||
Args:
|
||||
task_id (str): The unique identifier of the task.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing the status of the task.
|
||||
- If the task is not found, returns a 404 error with an error message.
|
||||
- If the task is completed, returns a 200 status with the result.
|
||||
- If the task is still in progress, returns a 202 status indicating the task is in progress.
|
||||
"""
|
||||
if task_id not in tasks:
|
||||
log.error("Task %s was not found", task_id, extra = {"task_id" : task_id})
|
||||
return {"error": "Task not found"}, 404
|
||||
|
||||
future = tasks[task_id]["future"]
|
||||
|
||||
try:
|
||||
if future.done():
|
||||
result = future.result()
|
||||
log.info("Returning completion of task %s", task_id, extra = {"task_id" : task_id})
|
||||
return {"status" : "completed", "result" : result}, 200
|
||||
else:
|
||||
log.info("Task %s is still in progress", task_id, extra = {"task_id" : task_id})
|
||||
return {"status" : "in progress"}, 202
|
||||
except Exception as e:
|
||||
errid = uuid.uuid4().hex
|
||||
|
||||
|
||||
log.error("Task %s failed with exception %s, UUID %s", task_id, traceback.format_exception(e), errid, extra = {"task_id" : task_id, "exception" : traceback.format_exception(e), "error_id" : errid})
|
||||
return {"status" : "internal error", "error_id" : errid }, 500
|
||||
|
||||
|
||||
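A sketch of polling a task started by the sync, backup or compact endpoints; host and port are assumed as above, and <task_id> stands for whatever id the earlier call returned:

    curl -L http://localhost:8006/tasks/<task_id>/status
    # 202 {"status": "in progress"}   while the task is running
    # 200 {"status": "completed", "result": ...}   once it finishes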
@git_ns.route('/repositories/<repo>', methods=['DELETE'])
|
||||
class GitRepo(Resource):
|
||||
def delete(self, repo):
|
||||
"""
|
||||
Deletes a Git repository.
|
||||
|
||||
This endpoint deletes a Git repository specified by the `repo` parameter.
|
||||
If the repository does not exist, it returns a 404 error with a message
|
||||
indicating that the repository was not found. If the repository is successfully
|
||||
deleted, it returns a 200 status with a message indicating that the repository
|
||||
was deleted.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository to delete.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a status message and the appropriate HTTP status code.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't delete repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
|
||||
shutil.rmtree(repo_path)
|
||||
log.info("Deleted repository %s", repo, extra = {"repository" : repo})
|
||||
return {"status": "Repository deleted"}, 200
|
||||
|
||||
|
||||
|
||||
|
||||
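Again assuming host and port, deleting a repository is a single DELETE with no confirmation step:

    curl -X DELETE -L http://localhost:8006/repositories/windows
    # {"status": "Repository deleted"}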
@git_ns.route('/repositories/<repo>/branches')
|
||||
class GitRepoBranches(Resource):
|
||||
def get(self, repo):
|
||||
"""
|
||||
Retrieve the list of branches for a given repository.
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a list of branch names or an error message if the repository is not found.
|
||||
- 200: A JSON object with a "branches" key containing a list of branch names.
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found" if the repository does not exist.
|
||||
"""
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get branches of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
|
||||
branches = []
|
||||
for branch in git_repo.branches:
|
||||
branches.append(branch.name)
|
||||
|
||||
log.info("Returning %i branches", len(branches))
|
||||
return {
|
||||
"branches": branches
|
||||
}
|
||||
|
||||
@git_ns.route('/repositories/<repo>/branches/<branch>')
|
||||
class GitRepoBranchesDeleter(Resource):
|
||||
def delete(self, repo, branch):
|
||||
"""Delete a given branch in a given repository
|
||||
|
||||
Args:
|
||||
repo (str): The name of the repository.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response containing a list of branch names or an error message if the repository is not found.
|
||||
- 200: A JSON object with a "status" key containing "deleted"
|
||||
- 404: A JSON object with an "error" key containing the message "Repository not found" or "Branch not found"
|
||||
"""
|
||||
|
||||
repo_path = os.path.join(REPOSITORIES_BASE_PATH, repo + ".git")
|
||||
if not os.path.isdir(repo_path):
|
||||
log.error("Can't get branches of repository repository %s, not found. Looked in %s", repo, repo_path, extra = {"repository" : repo, "path" : repo_path })
|
||||
return {"error": "Repository not found"}, 404
|
||||
|
||||
git_repo = git.Repo(repo_path)
|
||||
git_repo.git.config('--global', '--add', 'safe.directory', repo_path)
|
||||
|
||||
|
||||
if branch not in git_repo.branches:
|
||||
log.error("Can't delete branch %s, not found in repository %s", branch, repo, extra = {"repository" : repo, "branch" : branch})
|
||||
return {"error": "Branch not found"}, 404
|
||||
|
||||
git_repo.delete_head(branch)
|
||||
log.info("Branch %s of repository %s deleted", branch, repo, extra = {"repository" : repo, "branch" : branch})
|
||||
return {"status": "deleted"}, 200
|
||||
|
||||
|
||||
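A hedged example; host and port are assumed as before and "old-branch" is a made-up branch name:

    curl -X DELETE -L http://localhost:8006/repositories/windows/branches/old-branch
    # {"status": "deleted"}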
@git_ns.route('/health')
|
||||
class GitHealth(Resource):
|
||||
def get(self):
|
||||
"""
|
||||
Health check endpoint.
|
||||
|
||||
This endpoint returns a JSON response indicating the health status of the application.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with a status key set to "OK". Currently it always returns
|
||||
a successful value, but this endpoint can still be used to check that the API is
|
||||
active and functional.
|
||||
|
||||
"""
|
||||
log.info("Health check endpoint called")
|
||||
return {
|
||||
"status": "OK"
|
||||
}
|
||||
|
||||
@git_ns.route('/status')
|
||||
class GitStatus(Resource):
|
||||
def get(self):
|
||||
"""
|
||||
Status check endpoint.
|
||||
|
||||
This endpoint returns a JSON response indicating the status of the application.
|
||||
|
||||
Returns:
|
||||
Response: A JSON response with status information
|
||||
|
||||
"""
|
||||
log.info("Status endpoint called")
|
||||
|
||||
return {
|
||||
"uptime" : time.time() - start_time,
|
||||
"active_tasks" : len(tasks)
|
||||
}
|
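Both monitoring endpoints are plain GETs, for example (host and port assumed as in the earlier examples):

    curl -L http://localhost:8006/health    # {"status": "OK"}
    curl -L http://localhost:8006/status    # {"uptime": ..., "active_tasks": ...}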
File diff suppressed because it is too large
Binary file not shown.
|
@ -0,0 +1,43 @@
|
|||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Source: <url://example.com>
|
||||
Upstream-Name: ogboot
|
||||
Upstream-Contact: <preferred name and address to reach the upstream project>
|
||||
|
||||
Files:
|
||||
*
|
||||
Copyright:
|
||||
<years> <put author's name and email here>
|
||||
<years> <likewise for another author>
|
||||
License: GPL-3.0+
|
||||
|
||||
Files:
|
||||
debian/*
|
||||
Copyright:
|
||||
2025 vagrant <vagrant@build>
|
||||
License: GPL-3.0+
|
||||
|
||||
License: GPL-3.0+
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
.
|
||||
This package is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
.
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
Comment:
|
||||
On Debian systems, the complete text of the GNU General
|
||||
Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".
|
||||
|
||||
# Please also look if there are files or directories which have a
|
||||
# different copyright/license attached and list them here.
|
||||
# Please avoid picking licenses with terms that are more restrictive than the
|
||||
# packaged work, as it may make Debian's contributions unacceptable upstream.
|
||||
#
|
||||
# If you need, there are some extra license texts available in two places:
|
||||
# /usr/share/debhelper/dh_make/licenses/
|
||||
# /usr/share/common-licenses/
|
|
@ -0,0 +1,33 @@
|
|||
#!/usr/bin/make -f
|
||||
|
||||
# See debhelper(7) (uncomment to enable).
|
||||
# Output every command that modifies files on the build system.
|
||||
#export DH_VERBOSE = 1
|
||||
|
||||
|
||||
# See FEATURE AREAS in dpkg-buildflags(1).
|
||||
#export DEB_BUILD_MAINT_OPTIONS = hardening=+all
|
||||
|
||||
# See ENVIRONMENT in dpkg-buildflags(1).
|
||||
# Package maintainers to append CFLAGS.
|
||||
#export DEB_CFLAGS_MAINT_APPEND = -Wall -pedantic
|
||||
# Package maintainers to append LDFLAGS.
|
||||
#export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed
|
||||
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
|
||||
|
||||
|
||||
# Run composer install during the build phase
|
||||
override_dh_auto_build:
|
||||
|
||||
|
||||
# dh_make generated override targets.
|
||||
# This is an example for Cmake (see <https://bugs.debian.org/641051>).
|
||||
#override_dh_auto_configure:
|
||||
# dh_auto_configure -- \
|
||||
# -DCMAKE_LIBRARY_PATH=$(DEB_HOST_MULTIARCH)
|
|
@ -0,0 +1,33 @@
|
|||
#!/bin/bash
|
||||
|
||||
|
||||
|
||||
set -e
|
||||
|
||||
if [ ! -f "/etc/apt/sources.list.d/opengnsys.sources" ] ; then
|
||||
|
||||
cat > /etc/apt/sources.list.d/opengnsys.sources <<HERE
|
||||
Types: deb
|
||||
URIs: https://ognproject.evlt.uma.es/debian-opengnsys/
|
||||
Suites: noble
|
||||
Components: main
|
||||
Signed-By:
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
.
|
||||
mDMEZzx/SxYJKwYBBAHaRw8BAQdAa83CuAJ5/+7Pn9LHT/k34EAGpx5FnT/ExHSj
|
||||
XZG1JES0Ik9wZW5HbnN5cyA8b3Blbmduc3lzQG9wZW5nbnN5cy5lcz6ImQQTFgoA
|
||||
QRYhBC+J38Xsso227ZbDVt2S5xJQRhKDBQJnPH9LAhsDBQkFo5qABQsJCAcCAiIC
|
||||
BhUKCQgLAgQWAgMBAh4HAheAAAoJEN2S5xJQRhKDW/MBAO6swnpwdrbm48ypMyPh
|
||||
NboxvF7rCqBqHWwRHvkvrq7pAP9zd98r7z2AvqVXZxnaCsLTUNMEL12+DVZAUZ1G
|
||||
EquRBbg4BGc8f0sSCisGAQQBl1UBBQEBB0B6D6tkrwXSHi7ebGYsiMPntqwdkQ/S
|
||||
84SFTlSxRqdXfgMBCAeIfgQYFgoAJhYhBC+J38Xsso227ZbDVt2S5xJQRhKDBQJn
|
||||
PH9LAhsMBQkFo5qAAAoJEN2S5xJQRhKDJ+cBAM9jYbeq5VXkHLfODeVztgSXnSUe
|
||||
yklJ18oQmpeK5eWeAQDKYk/P0R+1ZJDItxkeP6pw62bCDYGQDvdDGPMAaIT6CA==
|
||||
=xcNc
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
HERE
|
||||
fi
|
||||
|
||||
|
||||
apt update
|
||||
apt install -y python3-git opengnsys-libarchive-c python3-termcolor python3-requests python3-tqdm bsdextrautils python3-paramiko python3-aniso8601 opengnsys-flask-restx opengnsys-flask-executor python3-flask python3-psutil
|
|
@ -0,0 +1,11 @@
|
|||
[Service]
|
||||
RestartSec=10s
|
||||
Type=simple
|
||||
User=oggit
|
||||
Group=oggit
|
||||
WorkingDirectory=/opt/opengnsys/ogrepository/oggit/api/
|
||||
ExecStart=/usr/bin/gunicorn -w 4 -b 0.0.0.0:8006 api_server:app
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -13,7 +13,7 @@ Downloads, installs and configures Forgejo, creates the default repositories, and conf
|
|||
Create the file `/etc/apt/sources.list.d/opengnsys.sources` with this content:
|
||||
|
||||
Types: deb
|
||||
URIs: https://ognproject.evlt.uma.es/debian-opengnsys/
|
||||
URIs: https://ognproject.evlt.uma.es/debian-opengnsys/opengnsys
|
||||
Suites: noble
|
||||
Components: main
|
||||
Signed-By:
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
[Service]
|
||||
RestartSec=10s
|
||||
Type=simple
|
||||
User={gitapi_user}
|
||||
Group={gitapi_group}
|
||||
WorkingDirectory={gitapi_work_path}
|
||||
ExecStart=/usr/bin/gunicorn -w 4 -b {gitapi_host}:{gitapi_port} gitapi:app
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
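Once install_api (further down) renders this template to /etc/systemd/system/gitapi.service and enables it, a quick sanity check could look like this; this is only a sketch: 8087 is the installer's default gitapi_port, and it assumes gitapi.py exposes the same /health endpoint as api_server.py:

    systemctl is-active gitapi
    curl -L http://localhost:8087/health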
@ -0,0 +1,31 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
if [ ! -f "/etc/apt/sources.list.d/opengnsys.sources" ] ; then
|
||||
|
||||
cat > /etc/apt/sources.list.d/opengnsys.sources <<HERE
|
||||
Types: deb
|
||||
URIs: https://ognproject.evlt.uma.es/debian-opengnsys/opengnsys
|
||||
Suites: noble
|
||||
Components: main
|
||||
Signed-By:
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
.
|
||||
mDMEZzx/SxYJKwYBBAHaRw8BAQdAa83CuAJ5/+7Pn9LHT/k34EAGpx5FnT/ExHSj
|
||||
XZG1JES0Ik9wZW5HbnN5cyA8b3Blbmduc3lzQG9wZW5nbnN5cy5lcz6ImQQTFgoA
|
||||
QRYhBC+J38Xsso227ZbDVt2S5xJQRhKDBQJnPH9LAhsDBQkFo5qABQsJCAcCAiIC
|
||||
BhUKCQgLAgQWAgMBAh4HAheAAAoJEN2S5xJQRhKDW/MBAO6swnpwdrbm48ypMyPh
|
||||
NboxvF7rCqBqHWwRHvkvrq7pAP9zd98r7z2AvqVXZxnaCsLTUNMEL12+DVZAUZ1G
|
||||
EquRBbg4BGc8f0sSCisGAQQBl1UBBQEBB0B6D6tkrwXSHi7ebGYsiMPntqwdkQ/S
|
||||
84SFTlSxRqdXfgMBCAeIfgQYFgoAJhYhBC+J38Xsso227ZbDVt2S5xJQRhKDBQJn
|
||||
PH9LAhsMBQkFo5qAAAoJEN2S5xJQRhKDJ+cBAM9jYbeq5VXkHLfODeVztgSXnSUe
|
||||
yklJ18oQmpeK5eWeAQDKYk/P0R+1ZJDItxkeP6pw62bCDYGQDvdDGPMAaIT6CA==
|
||||
=xcNc
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
HERE
|
||||
fi
|
||||
|
||||
|
||||
apt update
|
||||
apt install -y python3-git opengnsys-libarchive-c python3-termcolor python3-requests python3-tqdm bsdextrautils
|
|
@ -31,7 +31,7 @@ import datetime
|
|||
import tqdm
|
||||
|
||||
#FORGEJO_VERSION="8.0.3"
|
||||
FORGEJO_VERSION="9.0.3"
|
||||
FORGEJO_VERSION="10.0.3"
|
||||
FORGEJO_URL=f"https://codeberg.org/forgejo/forgejo/releases/download/v{FORGEJO_VERSION}/forgejo-{FORGEJO_VERSION}-linux-amd64"
|
||||
|
||||
|
||||
|
@ -213,16 +213,42 @@ class OpengnsysGitInstaller:
|
|||
self.git_basedir = "base.git"
|
||||
self.email = "OpenGnsys@opengnsys.com"
|
||||
|
||||
self.opengnsys_bin_path = os.path.join(self.base_path, "bin")
|
||||
self.opengnsys_etc_path = os.path.join(self.base_path, "etc")
|
||||
|
||||
self.forgejo_user = "oggit"
|
||||
self.forgejo_password = "opengnsys"
|
||||
self.forgejo_organization = "opengnsys"
|
||||
self.forgejo_port = 3000
|
||||
|
||||
self.forgejo_bin_path = os.path.join(self.ogrepository_base_path, "bin")
|
||||
self.forgejo_exe = os.path.join(self.forgejo_bin_path, "forgejo")
|
||||
self.forgejo_conf_dir_path = os.path.join(self.ogrepository_base_path, "etc", "forgejo")
|
||||
|
||||
self.lfs_dir_path = os.path.join(self.ogrepository_base_path, "oggit", "git-lfs")
|
||||
self.git_dir_path = os.path.join(self.ogrepository_base_path, "oggit", "git")
|
||||
|
||||
self.forgejo_var_dir_path = os.path.join(self.ogrepository_base_path, "var", "lib", "forgejo")
|
||||
self.forgejo_work_dir_path = os.path.join(self.forgejo_var_dir_path, "work")
|
||||
self.forgejo_work_custom_dir_path = os.path.join(self.forgejo_work_dir_path, "custom")
|
||||
self.forgejo_db_dir_path = os.path.join(self.forgejo_var_dir_path, "db")
|
||||
self.forgejo_data_dir_path = os.path.join(self.forgejo_var_dir_path, "data")
|
||||
|
||||
self.forgejo_db_path = os.path.join(self.forgejo_db_dir_path, "forgejo.db")
|
||||
|
||||
self.forgejo_log_dir_path = os.path.join(self.ogrepository_base_path, "log", "forgejo")
|
||||
|
||||
|
||||
self.dependencies = ["git", "python3-flask", "python3-flasgger", "gunicorn", ]
|
||||
|
||||
self.set_ssh_user_group("oggit", "oggit")
|
||||
|
||||
self.temp_dir = None
|
||||
self.script_path = os.path.realpath(os.path.dirname(__file__))
|
||||
|
||||
# Where we look for forgejo-app.ini and similar templates.
|
||||
self.template_path = self.script_path
|
||||
|
||||
# Possible names for SSH public keys
|
||||
self.ssh_key_users = ["root", "opengnsys"]
|
||||
self.key_names = ["id_rsa.pub", "id_ed25519.pub", "id_ecdsa.pub", "id_ed25519_sk.pub", "id_ecdsa_sk.pub"]
|
||||
|
@ -240,6 +266,9 @@ class OpengnsysGitInstaller:
|
|||
self.oglive = Oglive()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def set_testmode(self, value):
|
||||
"""Establece el modo de prueba"""
|
||||
self.testmode = value
|
||||
|
@ -700,7 +729,7 @@ class OpengnsysGitInstaller:
|
|||
self.verify_requirements()
|
||||
|
||||
self.__logger.debug("Installing dependencies")
|
||||
subprocess.run(["apt-get", "install", "-y", "git"], check=True)
|
||||
subprocess.run(["apt-get", "install", "-y"] + self.dependencies, check=True)
|
||||
|
||||
def _install_template(self, template, destination, keysvalues):
|
||||
|
||||
|
@ -711,7 +740,10 @@ class OpengnsysGitInstaller:
|
|||
data = template_file.read()
|
||||
|
||||
for key in keysvalues.keys():
|
||||
data = data.replace("{" + key + "}", keysvalues[key])
|
||||
if isinstance(keysvalues[key], int):
|
||||
data = data.replace("{" + key + "}", str(keysvalues[key]))
|
||||
else:
|
||||
data = data.replace("{" + key + "}", keysvalues[key])
|
||||
|
||||
with open(destination, "w+", encoding="utf-8") as out_file:
|
||||
out_file.write(data)
|
||||
|
@ -722,98 +754,112 @@ class OpengnsysGitInstaller:
|
|||
ret = subprocess.run(cmd, check=True,capture_output=True, encoding='utf-8')
|
||||
return ret.stdout.strip()
|
||||
|
||||
def install_forgejo(self):
|
||||
self.__logger.info("Installing Forgejo version %s", FORGEJO_VERSION)
|
||||
|
||||
|
||||
|
||||
def install_api(self):
|
||||
self.__logger.info("Installing Git API")
|
||||
|
||||
opengnsys_bin_path = os.path.join(self.base_path, "bin")
|
||||
opengnsys_etc_path = os.path.join(self.base_path, "etc")
|
||||
|
||||
forgejo_bin_path = os.path.join(self.ogrepository_base_path, "bin")
|
||||
bin_path = os.path.join(forgejo_bin_path, "forgejo")
|
||||
conf_dir_path = os.path.join(self.ogrepository_base_path, "etc", "forgejo")
|
||||
pathlib.Path(opengnsys_bin_path).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
lfs_dir_path = os.path.join(self.ogrepository_base_path, "oggit", "git-lfs")
|
||||
git_dir_path = os.path.join(self.ogrepository_base_path, "oggit", "git")
|
||||
data = {
|
||||
"gitapi_user" : "opengnsys",
|
||||
"gitapi_group" : "opengnsys",
|
||||
"gitapi_host" : "0.0.0.0",
|
||||
"gitapi_port" : 8087,
|
||||
"gitapi_work_path" : opengnsys_bin_path
|
||||
}
|
||||
|
||||
forgejo_work_dir_path = os.path.join(self.ogrepository_base_path, "var", "lib", "forgejo/work")
|
||||
forgejo_db_dir_path = os.path.join(self.ogrepository_base_path, "var", "lib", "forgejo/db")
|
||||
forgejo_data_dir_path = os.path.join(self.ogrepository_base_path, "var", "lib", "forgejo/data")
|
||||
shutil.copy("../api/gitapi.py", opengnsys_bin_path + "/gitapi.py")
|
||||
shutil.copy("opengnsys_git_installer.py", opengnsys_bin_path + "/opengnsys_git_installer.py")
|
||||
|
||||
forgejo_db_path = os.path.join(forgejo_db_dir_path, "forgejo.db")
|
||||
|
||||
forgejo_log_dir_path = os.path.join(self.ogrepository_base_path, "log", "forgejo")
|
||||
self._install_template(os.path.join(self.template_path, "gitapi.service"), "/etc/systemd/system/gitapi.service", data)
|
||||
|
||||
|
||||
conf_path = os.path.join(conf_dir_path, "app.ini")
|
||||
|
||||
self.__logger.debug("Reloading systemd and starting service")
|
||||
subprocess.run(["systemctl", "daemon-reload"], check=True)
|
||||
subprocess.run(["systemctl", "enable", "gitapi"], check=True)
|
||||
subprocess.run(["systemctl", "restart", "gitapi"], check=True)
|
||||
|
||||
|
||||
def _get_forgejo_data(self):
|
||||
conf_path = os.path.join(self.forgejo_conf_dir_path, "app.ini")
|
||||
|
||||
data = {
|
||||
"forgejo_user" : self.ssh_user,
|
||||
"forgejo_group" : self.ssh_group,
|
||||
"forgejo_port" : str(self.forgejo_port),
|
||||
"forgejo_bin" : self.forgejo_exe,
|
||||
"forgejo_app_ini" : conf_path,
|
||||
"forgejo_work_path" : self.forgejo_work_dir_path,
|
||||
"forgejo_data_path" : self.forgejo_data_dir_path,
|
||||
"forgejo_db_path" : self.forgejo_db_path,
|
||||
"forgejo_repository_root" : self.git_dir_path,
|
||||
"forgejo_lfs_path" : self.lfs_dir_path,
|
||||
"forgejo_log_path" : self.forgejo_log_dir_path,
|
||||
"forgejo_hostname" : self._runcmd("hostname"),
|
||||
"forgejo_lfs_jwt_secret" : self._runcmd([self.forgejo_exe,"generate", "secret", "LFS_JWT_SECRET"]),
|
||||
"forgejo_jwt_secret" : self._runcmd([self.forgejo_exe,"generate", "secret", "JWT_SECRET"]),
|
||||
"forgejo_internal_token" : self._runcmd([self.forgejo_exe,"generate", "secret", "INTERNAL_TOKEN"]),
|
||||
"forgejo_secret_key" : self._runcmd([self.forgejo_exe,"generate", "secret", "SECRET_KEY"])
|
||||
}
|
||||
|
||||
return data
|
||||
|
||||
def install_forgejo(self, download=True):
|
||||
self.__logger.info("Installing Forgejo version %s", FORGEJO_VERSION)
|
||||
|
||||
conf_path = os.path.join(self.forgejo_conf_dir_path, "app.ini")
|
||||
|
||||
self.__logger.info("Stopping opengnsys-forgejo service. This may cause a harmless warning.")
|
||||
|
||||
subprocess.run(["/usr/bin/systemctl", "stop", "opengnsys-forgejo"], check=False)
|
||||
|
||||
self.__logger.debug("Downloading from %s into %s", FORGEJO_URL, bin_path)
|
||||
pathlib.Path(forgejo_bin_path).mkdir(parents=True, exist_ok=True)
|
||||
self.__logger.debug("Downloading from %s into %s", FORGEJO_URL, self.forgejo_exe)
|
||||
pathlib.Path(self.forgejo_bin_path).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with open(bin_path, "wb") as forgejo_bin:
|
||||
with open(self.forgejo_exe, "wb") as forgejo_bin:
|
||||
download_with_progress(FORGEJO_URL, forgejo_bin)
|
||||
|
||||
os.chmod(bin_path, 0o755)
|
||||
os.chmod(self.forgejo_exe, 0o755)
|
||||
|
||||
if os.path.exists(forgejo_db_path):
|
||||
if os.path.exists(self.forgejo_db_path):
|
||||
self.__logger.debug("Removing old configuration")
|
||||
os.unlink(forgejo_db_path)
|
||||
os.unlink(self.forgejo_db_path)
|
||||
else:
|
||||
self.__logger.debug("Old configuration not present, ok.")
|
||||
|
||||
self.__logger.debug("Wiping old data")
|
||||
for dir in [conf_dir_path, git_dir_path, lfs_dir_path, forgejo_work_dir_path, forgejo_data_dir_path, forgejo_db_dir_path]:
|
||||
for dir in [self.forgejo_conf_dir_path, self.git_dir_path, self.lfs_dir_path, self.forgejo_work_dir_path, self.forgejo_data_dir_path, self.forgejo_db_dir_path]:
|
||||
if os.path.exists(dir):
|
||||
self.__logger.debug("Removing %s", dir)
|
||||
shutil.rmtree(dir)
|
||||
|
||||
self.__logger.debug("Creating directories")
|
||||
|
||||
pathlib.Path(opengnsys_etc_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(conf_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(git_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(lfs_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(forgejo_work_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(forgejo_data_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(forgejo_db_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(forgejo_log_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.opengnsys_etc_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_conf_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.git_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.lfs_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_work_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_data_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_db_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_log_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
os.chown(lfs_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(git_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(forgejo_data_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(forgejo_work_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(forgejo_db_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(forgejo_log_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.lfs_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.git_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_data_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_work_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_db_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_log_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
|
||||
data = {
|
||||
"forgejo_user" : self.ssh_user,
|
||||
"forgejo_group" : self.ssh_group,
|
||||
"forgejo_port" : str(self.forgejo_port),
|
||||
"forgejo_bin" : bin_path,
|
||||
"forgejo_app_ini" : conf_path,
|
||||
"forgejo_work_path" : forgejo_work_dir_path,
|
||||
"forgejo_data_path" : forgejo_data_dir_path,
|
||||
"forgejo_db_path" : forgejo_db_path,
|
||||
"forgejo_repository_root" : git_dir_path,
|
||||
"forgejo_lfs_path" : lfs_dir_path,
|
||||
"forgejo_log_path" : forgejo_log_dir_path,
|
||||
"forgejo_hostname" : self._runcmd("hostname"),
|
||||
"forgejo_lfs_jwt_secret" : self._runcmd([bin_path,"generate", "secret", "LFS_JWT_SECRET"]),
|
||||
"forgejo_jwt_secret" : self._runcmd([bin_path,"generate", "secret", "JWT_SECRET"]),
|
||||
"forgejo_internal_token" : self._runcmd([bin_path,"generate", "secret", "INTERNAL_TOKEN"]),
|
||||
"forgejo_secret_key" : self._runcmd([bin_path,"generate", "secret", "SECRET_KEY"])
|
||||
}
|
||||
data = self._get_forgejo_data()
|
||||
|
||||
self._install_template(os.path.join(self.script_path, "forgejo-app.ini"), conf_path, data)
|
||||
self._install_template(os.path.join(self.script_path, "forgejo.service"), "/etc/systemd/system/opengnsys-forgejo.service", data)
|
||||
self._install_template(os.path.join(self.template_path, "forgejo-app.ini"), conf_path, data)
|
||||
self._install_template(os.path.join(self.template_path, "opengnsys-forgejo.service"), "/etc/systemd/system/opengnsys-forgejo.service", data)
|
||||
|
||||
|
||||
self.__logger.debug("Reloading systemd and starting service")
|
||||
|
@ -828,7 +874,7 @@ class OpengnsysGitInstaller:
|
|||
self.__logger.info("Configuring forgejo")
|
||||
|
||||
def run_forge_cmd(args):
|
||||
cmd = [bin_path, "--config", conf_path] + args
|
||||
cmd = [self.forgejo_exe, "--config", conf_path] + args
|
||||
self.__logger.debug("Running command: %s", cmd)
|
||||
|
||||
ret = subprocess.run(cmd, check=False, capture_output=True, encoding='utf-8', user=self.ssh_user)
|
||||
|
@ -849,6 +895,81 @@ class OpengnsysGitInstaller:
|
|||
with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "w+", encoding='utf-8') as token_file:
|
||||
token_file.write(token)
|
||||
|
||||
def configure_forgejo(self):
|
||||
data = self._get_forgejo_data()
|
||||
self.__logger.debug("Creating directories")
|
||||
|
||||
pathlib.Path(self.opengnsys_etc_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_conf_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.git_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.lfs_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_work_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_work_custom_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_data_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_db_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
pathlib.Path(self.forgejo_log_dir_path).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
os.chown(self.lfs_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.git_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_data_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_work_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_db_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
os.chown(self.forgejo_log_dir_path, self.ssh_uid, self.ssh_gid)
|
||||
|
||||
|
||||
|
||||
conf_path = os.path.join(self.forgejo_conf_dir_path, "app.ini")
|
||||
self._install_template(os.path.join(self.template_path, "forgejo-app.ini"), conf_path, data)
|
||||
self._install_template(os.path.join(self.template_path, "opengnsys-forgejo.service"), "/etc/systemd/system/opengnsys-forgejo.service", data)
|
||||
|
||||
|
||||
|
||||
self.__logger.debug("Reloading systemd and starting service")
|
||||
subprocess.run(["systemctl", "daemon-reload"], check=True)
|
||||
subprocess.run(["systemctl", "enable", "opengnsys-forgejo"], check=True)
|
||||
|
||||
subprocess.run(["systemctl", "restart", "opengnsys-forgejo"], check=True)
|
||||
|
||||
self.__logger.info("Waiting for forgejo to start")
|
||||
self._wait_for_port("localhost", self.forgejo_port)
|
||||
|
||||
|
||||
self.__logger.info("Configuring forgejo")
|
||||
|
||||
def run_forge_cmd(args, ignore_errors = []):
|
||||
cmd = [self.forgejo_exe, "--config", conf_path] + args
|
||||
self.__logger.info("Running command: %s", cmd)
|
||||
|
||||
ret = subprocess.run(cmd, check=False, capture_output=True, encoding='utf-8', user=self.ssh_user)
|
||||
if ret.returncode == 0:
|
||||
return ret.stdout.strip()
|
||||
else:
|
||||
self.__logger.error("Failed to run command: %s, return code %i", cmd, ret.returncode)
|
||||
self.__logger.error("stdout: %s", ret.stdout.strip())
|
||||
self.__logger.error("stderr: %s", ret.stderr.strip())
|
||||
|
||||
for err in ignore_errors:
|
||||
if err in ret.stderr:
|
||||
self.__logger.info("Ignoring error, it's in the ignore list")
|
||||
return ret.stdout.strip()
|
||||
|
||||
raise RuntimeError("Failed to run necessary command")
|
||||
|
||||
run_forge_cmd(["migrate"])
|
||||
|
||||
run_forge_cmd(["admin", "doctor", "check"])
|
||||
|
||||
run_forge_cmd(["admin", "user", "create", "--username", self.forgejo_user, "--password", self.forgejo_password, "--email", self.email], ignore_errors=["user already exists"])
|
||||
|
||||
token = run_forge_cmd(["admin", "user", "generate-access-token", "--username", self.forgejo_user, "-t", "gitapi", "--scopes", "all", "--raw"], ignore_errors = ["access token name has been used already"])
|
||||
|
||||
if token:
|
||||
with open(os.path.join(self.base_path, "etc", "ogGitApiToken.cfg"), "w+", encoding='utf-8') as token_file:
|
||||
token_file.write(token)
|
||||
else:
|
||||
self.__logger.info("Keeping the old token")
|
||||
|
||||
|
||||
def add_forgejo_repo(self, repository_name, description = ""):
|
||||
token = ""
|
||||
|
@ -943,6 +1064,16 @@ if __name__ == '__main__':
|
|||
logger.addHandler(fileLog)
|
||||
|
||||
|
||||
if "postinst" in os.path.basename(__file__):
|
||||
logger.info("Running as post-install script")
|
||||
installer=OpengnsysGitInstaller()
|
||||
|
||||
# Templates get installed here
|
||||
installer.template_path = "/usr/share/opengnsys-forgejo/"
|
||||
installer.configure_forgejo()
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="OpenGnsys Installer",
|
||||
description="Script para la instalación del repositorio git",
|
||||
|
@ -1015,6 +1146,7 @@ if __name__ == '__main__':
|
|||
installer.get_image_paths(oglive_num = args.oglive)
|
||||
else:
|
||||
installer.install_dependencies()
|
||||
installer.install_api()
|
||||
installer.install_forgejo()
|
||||
|
||||
installer.add_forgejo_repo("windows", "Windows")
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
opengnsys-forgejo (0.5) UNRELEASED; urgency=medium
|
||||
|
||||
* Initial release.
|
||||
|
||||
-- OpenGnsys <opengnsys@opengnsys.es> Fri, 14 Mar 2025 08:40:35 +0100
|
|
@ -0,0 +1,37 @@
|
|||
Source: opengnsys-forgejo
|
||||
Section: unknown
|
||||
Priority: optional
|
||||
Maintainer: OpenGnsys <opengnsys@opengnsys.es>
|
||||
Rules-Requires-Root: no
|
||||
Build-Depends:
|
||||
debhelper-compat (= 13),
|
||||
Standards-Version: 4.6.2
|
||||
Homepage: https://opengnsys.es
|
||||
#Vcs-Browser: https://salsa.debian.org/debian/ogboot
|
||||
#Vcs-Git: https://salsa.debian.org/debian/ogboot.git
|
||||
|
||||
Package: opengnsys-forgejo
|
||||
Architecture: any
|
||||
Multi-Arch: foreign
|
||||
Depends:
|
||||
${shlibs:Depends},
|
||||
${misc:Depends},
|
||||
bsdextrautils,
|
||||
debconf (>= 1.5.0),
|
||||
gunicorn,
|
||||
opengnsys-flask-executor,
|
||||
opengnsys-flask-restx,
|
||||
opengnsys-libarchive-c,
|
||||
python3,
|
||||
python3-aniso8601,
|
||||
python3-flasgger,
|
||||
python3-flask,
|
||||
python3-git,
|
||||
python3-paramiko,
|
||||
python3-requests,
|
||||
python3-termcolor,
|
||||
python3-tqdm
|
||||
Conflicts:
|
||||
Description: Opengnsys Forgejo package for OgGit
|
||||
Files for OpenGnsys Git support
|
|
@ -0,0 +1,43 @@
|
|||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Source: <url://example.com>
|
||||
Upstream-Name: opengnsys-forgejo
|
||||
Upstream-Contact: <preferred name and address to reach the upstream project>
|
||||
|
||||
Files:
|
||||
*
|
||||
Copyright:
|
||||
<years> <put author's name and email here>
|
||||
<years> <likewise for another author>
|
||||
License: GPL-3.0+
|
||||
|
||||
Files:
|
||||
debian/*
|
||||
Copyright:
|
||||
2025 vagrant <vagrant@build>
|
||||
License: GPL-3.0+
|
||||
|
||||
License: GPL-3.0+
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
.
|
||||
This package is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
.
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
Comment:
|
||||
On Debian systems, the complete text of the GNU General
|
||||
Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".
|
||||
|
||||
# Please also look if there are files or directories which have a
|
||||
# different copyright/license attached and list them here.
|
||||
# Please avoid picking licenses with terms that are more restrictive than the
|
||||
# packaged work, as it may make Debian's contributions unacceptable upstream.
|
||||
#
|
||||
# If you need, there are some extra license texts available in two places:
|
||||
# /usr/share/debhelper/dh_make/licenses/
|
||||
# /usr/share/common-licenses/
|
|
@ -0,0 +1,2 @@
|
|||
opengnsys-forgejo_0.5_amd64.buildinfo unknown optional
|
||||
opengnsys-forgejo_0.5_amd64.deb unknown optional
|
|
@ -0,0 +1,2 @@
|
|||
/opt/opengnsys/oggit/bin
|
||||
/opt/opengnsys/ogrepository/etc/forgejo/
|
|
@ -0,0 +1,3 @@
|
|||
forgejo /opt/opengnsys/ogrepository/bin
|
||||
forgejo-app.ini /usr/share/opengnsys-forgejo/
|
||||
opengnsys-forgejo.service /usr/share/opengnsys-forgejo/
|
|
@ -0,0 +1,2 @@
|
|||
misc:Depends=
|
||||
misc:Pre-Depends=
|
|
@ -0,0 +1,25 @@
|
|||
Template: opengnsys/forgejo_organization
|
||||
Type: string
|
||||
Default: opengnsys
|
||||
Description: Forgejo organization
|
||||
|
||||
Template: opengnsys/forgejo_user
|
||||
Type: string
|
||||
Default: oggit
|
||||
Description: Forgejo oggit user
|
||||
|
||||
Template: opengnsys/forgejo_password
|
||||
Type: password
|
||||
Default: opengnsys
|
||||
Description: Password for the Forgejo oggit account
|
||||
|
||||
Template: opengnsys/forgejo_email
|
||||
Type: string
|
||||
Default: opengnsys@opengnsys.com
|
||||
Description: Email for the Forgejo oggit account
|
||||
|
||||
Template: opengnsys/forgejo_port
|
||||
Type: string
|
||||
Default: 3000
|
||||
Description: Forgejo TCP port
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
data = {
|
||||
"forgejo_user" : self.ssh_user,
|
||||
"forgejo_group" : self.ssh_group,
|
||||
"forgejo_port" : str(self.forgejo_port),
|
||||
"forgejo_bin" : bin_path,
|
||||
"forgejo_app_ini" : conf_path,
|
||||
"forgejo_work_path" : forgejo_work_dir_path,
|
||||
"forgejo_data_path" : forgejo_data_dir_path,
|
||||
"forgejo_db_path" : forgejo_db_path,
|
||||
"forgejo_repository_root" : git_dir_path,
|
||||
"forgejo_lfs_path" : lfs_dir_path,
|
||||
"forgejo_log_path" : forgejo_log_dir_path,
|
||||
"forgejo_hostname" : _runcmd("hostname"),
|
||||
"forgejo_lfs_jwt_secret" : _runcmd([bin_path,"generate", "secret", "LFS_JWT_SECRET"]),
|
||||
"forgejo_jwt_secret" : _runcmd([bin_path,"generate", "secret", "JWT_SECRET"]),
|
||||
"forgejo_internal_token" : _runcmd([bin_path,"generate", "secret", "INTERNAL_TOKEN"]),
|
||||
"forgejo_secret_key" : _runcmd([bin_path,"generate", "secret", "SECRET_KEY"])
|
||||
}
|
||||
|
||||
ini_template = "/usr/share/opengnsys-forgejo/forgejo-app.ini"
|
||||
|
||||
|
||||
def _install_template(self, template, destination, keysvalues):
|
||||
data = ""
|
||||
with open(template, "r", encoding="utf-8") as template_file:
|
||||
data = template_file.read()
|
||||
|
||||
for key in keysvalues.keys():
|
||||
if isinstance(keysvalues[key], int):
|
||||
data = data.replace("{" + key + "}", str(keysvalues[key]))
|
||||
else:
|
||||
data = data.replace("{" + key + "}", keysvalues[key])
|
||||
|
||||
with open(destination, "w+", encoding="utf-8") as out_file:
|
||||
out_file.write(data)
|
||||
|
||||
|
||||
|
||||
|
||||
_install_template(os.path.join(self.script_path, "forgejo-app.ini"), conf_path, data)
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
#!/usr/bin/make -f
|
||||
|
||||
# See debhelper(7) (uncomment to enable).
|
||||
# Output every command that modifies files on the build system.
|
||||
#export DH_VERBOSE = 1
|
||||
|
||||
|
||||
# See FEATURE AREAS in dpkg-buildflags(1).
|
||||
#export DEB_BUILD_MAINT_OPTIONS = hardening=+all
|
||||
|
||||
# See ENVIRONMENT in dpkg-buildflags(1).
|
||||
# Package maintainers to append CFLAGS.
|
||||
#export DEB_CFLAGS_MAINT_APPEND = -Wall -pedantic
|
||||
# Package maintainers to append LDFLAGS.
|
||||
#export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed
|
||||
|
||||
|
||||
%:
|
||||
dh $@
|
||||
|
||||
|
||||
|
||||
|
||||
# Run composer install during the build phase
|
||||
override_dh_auto_build:
|
||||
cp -v ../../installer/opengnsys_git_installer.py debian/opengnsys-forgejo.postinst
|
||||
|
||||
override_dh_auto_install:
|
||||
dh_auto_install
|
||||
mkdir -p debian/opengnsys-forgejo/opt/opengnsys/ogrepository/var/lib/forgejo
|
||||
mkdir -p debian/opengnsys-forgejo/opt/opengnsys/ogrepository/var/lib/forgejo/work
|
||||
chown -R oggit:oggit debian/opengnsys-forgejo/opt/opengnsys/ogrepository/var/lib/forgejo
|
||||
|
||||
|
||||
# dh_make generated override targets.
|
||||
# This is an example for Cmake (see <https://bugs.debian.org/641051>).
|
||||
#override_dh_auto_configure:
|
||||
# dh_auto_configure -- \
|
||||
# -DCMAKE_LIBRARY_PATH=$(DEB_HOST_MULTIARCH)
|
|
@ -0,0 +1,6 @@
|
|||
#!/bin/bash
|
||||
VERSION=10.0.1
|
||||
|
||||
wget https://codeberg.org/forgejo/forgejo/releases/download/v${VERSION}/forgejo-${VERSION}-linux-amd64 -O forgejo
|
||||
chmod 755 forgejo
|
||||
|
|
@ -0,0 +1,78 @@
|
|||
APP_NAME = OpenGnsys Git
|
||||
APP_SLOGAN =
|
||||
RUN_USER = {forgejo_user}
|
||||
WORK_PATH = {forgejo_work_path}
|
||||
RUN_MODE = prod
|
||||
|
||||
[database]
|
||||
DB_TYPE = sqlite3
|
||||
HOST = 127.0.0.1:3306
|
||||
NAME = forgejo
|
||||
USER = forgejo
|
||||
PASSWD =
|
||||
SCHEMA =
|
||||
SSL_MODE = disable
|
||||
PATH = {forgejo_db_path}
|
||||
LOG_SQL = false
|
||||
|
||||
[repository]
|
||||
ROOT = {forgejo_repository_root}
|
||||
|
||||
[server]
|
||||
SSH_DOMAIN = og-admin
|
||||
DOMAIN = og-admin
|
||||
HTTP_PORT = {forgejo_port}
|
||||
ROOT_URL = http://{forgejo_hostname}:{forgejo_port}/
|
||||
APP_DATA_PATH = {forgejo_data_path}
|
||||
DISABLE_SSH = false
|
||||
SSH_PORT = 22
|
||||
LFS_START_SERVER = true
|
||||
LFS_JWT_SECRET = {forgejo_lfs_jwt_secret}
|
||||
OFFLINE_MODE = true
|
||||
|
||||
[lfs]
|
||||
PATH = {forgejo_lfs_path}
|
||||
|
||||
[mailer]
|
||||
ENABLED = false
|
||||
|
||||
[service]
|
||||
REGISTER_EMAIL_CONFIRM = false
|
||||
ENABLE_NOTIFY_MAIL = false
|
||||
DISABLE_REGISTRATION = true
|
||||
ALLOW_ONLY_EXTERNAL_REGISTRATION = false
|
||||
ENABLE_CAPTCHA = false
|
||||
REQUIRE_SIGNIN_VIEW = false
|
||||
DEFAULT_KEEP_EMAIL_PRIVATE = false
|
||||
DEFAULT_ALLOW_CREATE_ORGANIZATION = true
|
||||
DEFAULT_ENABLE_TIMETRACKING = true
|
||||
NO_REPLY_ADDRESS = noreply.localhost
|
||||
|
||||
[openid]
|
||||
ENABLE_OPENID_SIGNIN = true
|
||||
ENABLE_OPENID_SIGNUP = true
|
||||
|
||||
[cron.update_checker]
|
||||
ENABLED = true
|
||||
|
||||
[session]
|
||||
PROVIDER = file
|
||||
|
||||
[log]
|
||||
MODE = console
|
||||
LEVEL = info
|
||||
ROOT_PATH = {forgejo_log_path} #/tmp/log
|
||||
|
||||
[repository.pull-request]
|
||||
DEFAULT_MERGE_STYLE = merge
|
||||
|
||||
[repository.signing]
|
||||
DEFAULT_TRUST_MODEL = committer
|
||||
|
||||
[security]
|
||||
INSTALL_LOCK = true
|
||||
INTERNAL_TOKEN = {forgejo_internal_token}
|
||||
PASSWORD_HASH_ALGO = pbkdf2_hi
|
||||
|
||||
[oauth2]
|
||||
JWT_SECRET = {forgejo_jwt_secret}
|
|
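The {forgejo_*} placeholders in this template are filled in by _install_template from the values built in _get_forgejo_data; the secrets in particular come from the forgejo binary itself, roughly like this (a sketch, using the path the package installs the binary to):

    /opt/opengnsys/ogrepository/bin/forgejo generate secret LFS_JWT_SECRET
    /opt/opengnsys/ogrepository/bin/forgejo generate secret JWT_SECRET
    /opt/opengnsys/ogrepository/bin/forgejo generate secret INTERNAL_TOKEN
    /opt/opengnsys/ogrepository/bin/forgejo generate secret SECRET_KEY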
@ -0,0 +1,11 @@
|
|||
[Service]
|
||||
RestartSec=10s
|
||||
Type=simple
|
||||
User=oggit
|
||||
Group=oggit
|
||||
WorkingDirectory=/opt/opengnsys/ogrepository/var/lib/forgejo/work
|
||||
ExecStart=/opt/opengnsys/ogrepository/bin/forgejo web --config /opt/opengnsys/etc/forgejo/app.ini
|
||||
Restart=always
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|