Minor progress in wizzard

Signed-off-by: Václav Valíček <valicek1994@gmail.com>
Václav Valíček 2022-08-06 02:11:44 +02:00
parent ebc5b68ddd
commit 92bf69f4f3
Signed by: valicek
GPG Key ID: FF05BDCA0C73BB31
5 changed files with 276 additions and 200 deletions

View File

@@ -1,26 +1,4 @@
#!/bin/bash
set -euo pipefail
IFS=$'\n\t'
if [[ ! ":$PATH:" == *":/usr/local/bin:"* ]]
then
export PATH="/usr/local/bin:$PATH"
fi
# if started as root
if [ $UID -eq 0 ]
then
chown executor:executor /data
su executor -c $0
exit $?
fi
dir_prefix=cloner
function die(){
echo $@ 1>&2
exit 1
}
function checkProjectName(){
# check, if volume does not exist yet
@@ -33,64 +11,6 @@ function checkProjectName(){
fi
}
function createConfigFile(){
# creates config file, writes it to $1 location
loc=$1
# vars
local tmpl_name=$read_project_name
local tmpl_url=$read_url
local tmpl_interval=$read_interval
local tmpl_submodules=$read_submodules
if [ $read_submodule_limit = "N" ]
then
local tmpl_submodules_depth_enabled='# '
local tmpl_submodules_depth=50000
else
local tmpl_submodules_depth_enabled=''
local tmpl_submodules_depth=$read_submodule_limit
fi
cat > $loc <<-EOF
# cloner.cfg
# main config
# created at $(date +"%Y-%m-%d %X")
# main url - url of main repo - just to clone
cloner_repo_url=$tmpl_url
# project name (names of volumes are derrived from this
cloner_project_name=$tmpl_name
# cloner interval (in minutes, default=0 - run always)
cloner_interval=$tmpl_interval
# do you need submodules support? (1/0)
cloner_submodules=$tmpl_submodules
# max depth of submodule scan (default = unlimited, uncomment to use)
${tmpl_submodules_depth_enabled}cloner_submodule_depth=$tmpl_submodules_depth
EOF
}
function createDetectorConfig(){
# $1 - file
local cfgFile=$1
mkdir -p $(dirname $cfgFile)
if [ $read_detector -eq 1 ]
then
cat > $cfgFile <<-EOF
# this file is config for detector
# now, it is empty - to disable detector, just delete it!
EOF
fi
}
function generateSSHKey(){
# generates ssh key with $1 path and $2 description
local keyfile=$1/identity
@@ -120,55 +40,6 @@ function reuseSSHKey(){
ssh-keygen -y -f $keyfile -P "" || true # will fail in the end, so script will continue and clean up the mess
}
# start reading vars
echo -n "Enter project name: "
read read_project_name
checkProjectName "$read_project_name"
# repository URL
echo -n "Enter git repository URL: "
read read_url
[ -n "$read_url" ] || die "Empty url is not allowed!"
# check interval
echo -n "Enter check interval in minutes [5]: "
read read_interval
[ -n "$read_interval" ] || read_interval=5
[[ "$read_interval" =~ ^[0-9]+$ ]] || echo "Entered interval is not number. Try again.."
# submodule use
echo -n "Mirror including submodules? [Y/n]"
read read_submodules
[ -n "$read_submodules" ] || read_submodules=Y
[[ "$read_submodules" =~ ^[Yy]$ ]] && read_submodules=1 || read_submodules=0
# submodule limit
if [ $read_submodules -eq 1 ]
then
echo -n "Limit for submodule discovery [<number>/N]: "
read read_submodule_limit
[ -n "$read_submodule_limit" ] || read_submodule_limit=N
if ! [[ "$read_submodule_limit" =~ ^[Nn]$ ]]
then
[[ "$read_submodule_limit" =~ ^[0-9]+$ ]] || die "Submodule limit must be n,N or number!"
fi
else
read_submodule_limit=N
fi
# determine CI?
echo -n "Do you want to enable CI support? (detector) [Y/n]"
read read_detector
[ -n "$read_detector" ] || read_detector=Y
if ! [[ "$read_detector" =~ ^[Yy]$ ]]
then
read_detector=0
else
read_detector=1
fi
root=/data/$dir_prefix-$read_project_name
# start generating config

View File

@@ -0,0 +1,228 @@
#!/usr/bin/env python3
import logging
from repo_cloner.lib.logger_setup import log
import os
import pyinputplus as pyip
from typing import Optional, Callable
from pathlib import Path

# base dir
base_dir: Optional[Path] = None
cloner_prefix: str = "cloner-"
data: dict = {}


# determine starting user and demote UID/GID to unprivileged user - safety first :)
def check_privileges():
    if os.getuid() == 0:
        log.info(f"Running as root, downgrading permissions")
        os.setgid(1000)
        os.setuid(1000)
        os.setegid(1000)
        os.seteuid(1000)
        log.info(f"New UID:GID: {os.getuid()}:{os.getgid()}")


def parse_args():
    global base_dir
    import argparse
    # parse input arguments
    parser = argparse.ArgumentParser(description = "repo-cloner initialization wizzard")
    parser.add_argument('--base-dir', help = 'path to directory containing whole cloner structure', required = True,
                        default = None, type = str)
    args = parser.parse_args()
    base_dir = Path(args.base_dir)


def gen_config_file(
        conf_dir: Path,
        cloner_repo_url,
        cloner_project_name,
        cloner_interval,
        cloner_submodules,
        cloner_submodule_depth,
        detector,
        **kwargs
):
    from datetime import datetime
    log.info(f"Creating config files in {conf_dir.as_posix()}")
    if not conf_dir.exists():
        log.info(f"Creating config dir")
        conf_dir.mkdir(parents = True)

    log.info("Creating cloner.cfg")
    conf_file = conf_dir.joinpath("cloner.cfg")
    conf_file.write_text(
        f"# cloner.cfg\n"
        "# main config\n"
        f"# created at {datetime.now().strftime('%Y-%m-%d %X')}\n\n"
        "# main url - url of main repo - just to clone\n"
        f"cloner_repo_url = {cloner_repo_url}\n\n"
        "# project name (names of volumes are derived from this)\n"
        f"cloner_project_name = {cloner_project_name}\n\n"
        "# cloner interval (in minutes, default=0 - run always)\n"
        f"cloner_interval = {cloner_interval}\n\n"
        "# do you need submodules support? (1/0)\n"
        f"cloner_submodules = {cloner_submodules}\n\n"
        "# max depth of submodule scan (default = unlimited, uncomment to use)\n"
        f"{'' if cloner_submodules else '# '}cloner_submodule_depth = {cloner_submodule_depth}\n\n"
    )

    if detector:
        log.info("Creating detector.cfg")
        conf_file = conf_dir.joinpath("detector.cfg")
        conf_file.write_text(
            "# this file is config for detector\n"
            "# now, it is empty - to disable detector, just delete it!\n"
        )


def check_project_name(name: str):
    if not len(name):
        raise Exception("Empty input is invalid input!")
    target = os.path.join(base_dir, f"{cloner_prefix}{name}")
    log.debug(f"Validating project name - path {target}")
    if os.path.exists(target):
        log.warning(f"Project name occupied: {target}")
        raise Exception(f"Project name {name} is occupied by another project")


def check_url(name: str):
    if not len(name):
        raise Exception("You must input URL")


def input_default_str(query: str, default_value: str, validation: Optional[Callable[[str], None]] = None) -> str:
    log.debug(f"Input query {query} with default {default_value}")
    while True:
        new_query = query
        if len(default_value):
            new_query += f" [{default_value}] "
        else:
            new_query += " "
        ret = pyip.inputStr(new_query, blank = True, strip = True)
        if ret == "":
            log.debug(f"Empty query answer => using previous/default value")
            ret = default_value
        try:
            if validation:
                validation(ret)
        except Exception as e:
            log.warning(e.__str__())
            default_value = ret
            continue
        log.debug("Query finished")
        break
    return ret


def input_default_int(query: str, default_value: int, validation: Optional[Callable[[int], None]] = None) -> int:
    log.debug(f"Input query {query} with default {default_value}")
    while True:
        new_query = f"{query} [{default_value}] "
        ret = pyip.inputInt(new_query, blank = True, strip = True, min = 0)
        if not ret:
            log.debug(f"Empty query answer => using previous/default value")
            ret = default_value
        try:
            if validation:
                validation(ret)
        except Exception as e:
            log.warning(e.__str__())
            default_value = ret
            continue
        log.debug("Query finished")
        break
    return ret


def input_default_bool(query: str, default_value: bool) -> bool:
    log.debug(f"Input query {query} with default {default_value}")
    new_query = f"{query} [{'Y' if default_value else 'N'}] "
    ret = pyip.inputYesNo(new_query, blank = True, strip = True)
    if not len(ret):
        log.debug(f"Empty query answer => using previous/default value")
        ret = default_value
    if ret == "yes":
        ret = True
    if ret == "no":
        ret = False
    log.debug("Query finished")
    return ret


def query_repo_info() -> bool:
    log.debug(f"Querying base info")
    # project name
    data["cloner_project_name"] = input_default_str(
        "Enter project name:",
        data["cloner_project_name"],
        check_project_name)
    # url
    data["cloner_repo_url"] = input_default_str("Enter project url:", data["cloner_repo_url"], check_url)
    # interval
    data["cloner_interval"] = input_default_int("Enter sync interval:", data["cloner_interval"])
    # submodule support
    data["cloner_submodules"] = input_default_bool("Mirror including submodules? [y/n]:", data["cloner_submodules"])
    if data["cloner_submodules"]:
        data["cloner_submodule_depth"] = input_default_int(
            "Limit for submodule discovery [<number>/0]:",
            data["cloner_submodule_depth"]
        )
    data["detector"] = input_default_bool("Do you want to enable CI support? (detector) [y/n]", data["detector"])


def query_repo_info_recursive():
    while True:
        query_repo_info()
        print("Actual settings:")
        for key, value in data.items():
            print(f"{key: <30} : {value}")
        if not input_default_bool("Do you want to edit config? [y/n]", False):
            break


def main() -> int:
    check_privileges()
    parse_args()
    # determine debug
    debug = pyip.inputYesNo("Enable verbose logging? [y/N]", default = "no", blank = True)
    if debug == "yes":
        log.info("Setting verbose logging")
        log.setLevel(logging.DEBUG)

    # defaults
    data["cloner_project_name"] = ""
    data["cloner_repo_url"] = ""
    data["cloner_interval"] = 5
    data["cloner_submodules"] = False
    data["cloner_submodule_depth"] = 0
    data["detector"] = True

    query_repo_info_recursive()

    project_path = base_dir.joinpath(f"{cloner_prefix}{data['cloner_project_name']}")
    project_path.mkdir()
    project_path.joinpath("repos").mkdir()
    project_path.joinpath("cache").mkdir()
    config_dir = project_path.joinpath("config")
    config_dir.mkdir()
    gen_config_file(config_dir, **data)
    return 0
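For orientation, the cloner.cfg that gen_config_file() writes for a hypothetical run (project name example, URL https://example.org/example.git, 5-minute interval, submodules enabled with a depth limit of 50) would come out roughly as follows; the values are illustrative, only the keys and comments come from the template above:

# cloner.cfg
# main config
# created at 2022-08-06 02:11:44

# main url - url of main repo - just to clone
cloner_repo_url = https://example.org/example.git

# project name (names of volumes are derived from this)
cloner_project_name = example

# cloner interval (in minutes, default=0 - run always)
cloner_interval = 5

# do you need submodules support? (1/0)
cloner_submodules = True

# max depth of submodule scan (default = unlimited, uncomment to use)
cloner_submodule_depth = 50

Note that cloner_submodules is rendered with Python bool formatting (True/False) rather than the 1/0 the comment suggests, since the wizard stores the answer as a bool.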

View File

@@ -0,0 +1,11 @@
import logging
console_logger = logging.StreamHandler()
console_formatter = logging.Formatter(
    "%(asctime)-15s.%(msecs)03d :: [%(levelname)8s] :: %(name)-15s :: %(message)s (%(filename)s:%(lineno)s)",
    "%Y-%m-%d %H:%M:%S")
# setup logger
console_logger.setFormatter(console_formatter)
log = logging.getLogger("rc")
log.addHandler(console_logger)
log.setLevel(logging.INFO)

View File

@@ -1,57 +1,53 @@
 #!/usr/bin/env python3
+from repo_cloner.lib.logger_setup import log
 import argparse
 import logging
 import os
-import logging as l
+import subprocess
+import base64
-console_logger = l.StreamHandler()
-console_formatter = l.Formatter(
-    "%(asctime)-15s :: [%(levelname)8s] :: %(name)-15s :: %(message)s (%(filename)s:%(lineno)s)",
-    "%Y-%m-%d %H:%M:%S")
-# setup logger
-console_logger.setFormatter(console_formatter)
-log = l.getLogger("rc")
-log.addHandler(console_logger)
-log.setLevel(logging.DEBUG)
 from repo_cloner.lib.repo_dir_structure import RepoDirStructure
-from repo_cloner.lib.cloner import Cloner
-from repo_cloner.lib.repo_tool import RepoTool
-from git.config import GitConfigParser
-from git.repo import Repo
-from typing import Union
+from repo_cloner.lib import Cloner, DetectedCommit, prepare_git_auth, init_gh_token

-def config_try_override(config_writer: GitConfigParser, section: str, option: str, value: str):
-    if not section in config_writer.sections():
-        log.debug(f"CFG Creating section: {section}")
-        config_writer.add_section(section)
-    if not config_writer.has_option(section, option):
-        log.debug(f"CFG Creating option: {option}")
-        config_writer.add_value(section, option, "")
-    log.debug(f"Setting {section}.{option} = {value}")
-    config_writer.set(section, option, value)
+def detector_executor(commit: DetectedCommit):
+    message = base64.b64encode(commit.log.encode()).decode()
+    env = {
+        "sha": commit.commit,
+        "abbrev": commit.abbrev,
+        "author": commit.author,
+        "has_tags": commit.is_tag,
+        "has_branches": commit.is_branch,
+        "tags": commit.tags,
+        "branches": commit.branches,
+        "date": commit.date,
+        "log": message,
+    }
+    arg_list = ["/bin/echo", "laminarc", "queue", commit.project]
+    for key, val in env.items():
+        arg_list.append(f"COMMIT_{key.upper()}={val}")
+    subprocess.run(arg_list)

 def main() -> int:
-    def update(op_code: int, cur_count: Union[str, float], max_count: Union[str, float, None] = None,
-               message: str = ''):
-        log.debug(f"op: {op_code}; cur: {cur_count}/{max_count}; mess: {message}")
     # parse input arguments
     parser = argparse.ArgumentParser(description = "repo-cloner entering script")
     parser.add_argument('--base-dir', help = 'path to directory containing whole cloner structure', required = True,
                         default = None, type = str)
+    parser.add_argument('--debug', '-d', help = "enable debug output", action = 'store_true')
     args = parser.parse_args()
+    if args.debug:
+        log.setLevel(logging.DEBUG)
     log.info(f"Started processing git group in folder: {args.base_dir}")
     dirs = RepoDirStructure(args.base_dir)
     log.debug(f"Patching XDG_CONFIG_HOME to mock up git config")
     os.environ['XDG_CONFIG_HOME'] = dirs.conf_dir
+    init_gh_token()
     # check dir existence
     try:
         assert dirs.dirs_exist
@@ -67,7 +63,6 @@ def main() -> int:
         log.critical(e.__str__())
         return 1
-    # ignore invalid config lines
     # check if there is project name & git url supplied
     config = dirs.config
     if len(config.cloner_repo_url) == 0:
@@ -77,47 +72,16 @@ def main() -> int:
     if len(config.cloner_project_name) == 0:
         log.warning("Config directive cloner_project_name should not be omitted!")
-    # cloner = Cloner(dirs)
-    # cloner.check_interval()
-    import subprocess
-    subprocess.run(["/usr/bin/rm", "-Rf", "/tmp/test/repos"])
-    subprocess.run(["/usr/bin/mkdir", "/tmp/test/repos"])
-    rt = RepoTool("/tmp/test/repos/main.git")
-    # rt.clone("https://github.com/u-boot/u-boot.git")
-    x = rt.clone_recursive("file:///home/vasek/dev/repo-cloner/tests/_support_data/test-repo-submodules-multilevel")
-    print(x)
-    # url = ""
-    # for x in rt._repo.remote("origin").urls:
-    # url = x
-    # url = url.replace("test-repo-base", "test-repo-reduced")
-    # url = url.replace("test-repo-base", "test-repo-changed-branches")
-    # rt._repo.remote("origin").set_url(url)
+    cloner = Cloner(dirs)
+    prepare_git_auth(cloner.main_repo_path, dirs.conf_dir)
+    if not cloner.sync():
+        log.warning(f"Repo sync did not succeed")
+    if cloner.detector_enabled:
+        cloner.detector_run(detector_executor)
     return 0
-    # from git import Repo
-    r = Repo("file:///home/vasek/dev/repo-cloner")
-    path: str = r._get_config_path("user")
-    print(path)
-    path = os.path.dirname(path)
-    print(path)
-    if not os.path.isdir(path):
-        os.mkdir(path)
-    cred_store: str = os.path.join(path, "git-credentials")
-    ssh_identity: str = os.path.join(dirs.conf_dir, "ssh", "identity")
-    with r.config_writer("user") as cfgw:
-        config_try_override(cfgw, "credential", "helper", f"store --file={cred_store}")
-        config_try_override(cfgw, "core", "sshcommand",
-                            f"ssh -i {ssh_identity} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -q")
-    from repo_cloner.lib.cloner_config import ClonerConfigParser
-    ClonerConfigParser(os.path.join(dirs.conf_dir, "cloner.cfg"))

 if __name__ == "__main__":
     exit(main())
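Since arg_list above still starts with /bin/echo, detector_executor only prints the laminarc invocation it would otherwise run. For a hypothetical commit on project example, the echoed command would look roughly like this (placeholders stand in for the actual values; COMMIT_LOG carries the base64-encoded commit log):

laminarc queue example COMMIT_SHA=<full sha> COMMIT_ABBREV=<short sha> COMMIT_AUTHOR=<author> COMMIT_HAS_TAGS=<bool> COMMIT_HAS_BRANCHES=<bool> COMMIT_TAGS=<tags> COMMIT_BRANCHES=<branches> COMMIT_DATE=<date> COMMIT_LOG=<base64 log>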

View File

@@ -7,6 +7,8 @@ setup(
     version = '0.1',
     packages = find_packages(),
     entry_points = {
-        'console_scripts': ['process_repository_dir=repo_cloner.process_repository_dir:main']
+        'console_scripts': [
+            'process_repository_dir=repo_cloner.process_repository_dir:main',
+            'initialization_wizzard=repo_cloner.initialization_wizzard:main',]
     },
 )
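Assuming the package is installed (for example with pip install -e .), the two console scripts registered above would be driven roughly like this; the /data directory and project name are illustrative, matching the cloner- prefix the wizard uses when creating the project directory:

pip install -e .
initialization_wizzard --base-dir /data
process_repository_dir --base-dir /data/cloner-example --debug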