import hashlib
import logging
import os.path
import time
from io import BytesIO
from typing import Optional, Union

import git
from git import RemoteProgress, Repo
from git.exc import GitCommandError, InvalidGitRepositoryError, NoSuchPathError

from repo_cloner.lib.checksum import gen_repo_hashed_name

log = logging.getLogger("rc.repo")


class GitRemoteProgress(RemoteProgress):
    OP_CODES = [
        "BEGIN",
        "CHECKING_OUT",
        "COMPRESSING",
        "COUNTING",
        "END",
        "FINDING_SOURCES",
        "RECEIVING",
        "RESOLVING",
        "WRITING",
    ]
    OP_CODE_MAP = {
        getattr(RemoteProgress, _op_code): _op_code for _op_code in OP_CODES
    }

    # rate-limit progress logging: emit at most one line per time_thr seconds
    last_step_time = time.time()
    time_thr = 0.5

    cur_task: str = ""
    cur_task_max: int = 0

    def __init__(self) -> None:
        super().__init__()
        # start with an expired timer so the first update is logged immediately
        self.last_step_time = time.time() - self.time_thr
        self.cur_task_max = 0
        self.cur_task = ""

    def __del__(self) -> None:
        self.finish()

    @classmethod
    def get_curr_op(cls, op_code: int) -> str:
        """Get OP name from OP code."""
        # Remove BEGIN- and END-flag and get op name
        op_code_masked = op_code & cls.OP_MASK
        return cls.OP_CODE_MAP.get(op_code_masked, "?").title()

    def finish(self):
        log.info(f"GIT {self.cur_task}: 100.00% ({self.cur_task_max})")

    def update(
            self,
            op_code: int,
            cur_count: Union[float, str],
            max_count: Union[float, str, None] = None,
            message: Optional[str] = "",
    ) -> None:
        # Do I need to update?
        # -> begin: YES
        # -> end:   YES
        # -> timer: YES

        # so check timer
        if (self.last_step_time + self.time_thr) > time.time():
            # timer has not elapsed yet
            if not ((op_code & self.BEGIN) or (op_code & self.END)):
                # skip -> no begin or end
                return
        # update timer
        self.last_step_time = time.time()

        # Start new bar on each BEGIN-flag
        if op_code & self.BEGIN:
            self.cur_task = self.get_curr_op(op_code).upper()
            try:
                self.cur_task_max = int(max_count)
            except (TypeError, ValueError):
                # max_count may be None or non-numeric -> fall back to a percent scale
                self.cur_task_max = 100

            log.info(f"GIT {self.cur_task} started")

        percent = round(100 * (float(cur_count) / self.cur_task_max), 2)

        # End progress monitoring on each END-flag
        if op_code & self.END:
            percent = 100

        log.info(f"GIT {self.cur_task}: {percent}% ({cur_count}; {message})")
class RepoTool:
    _repo: Optional[Repo] = None
    _initialized: bool = False
    _bare: bool = False
    _path: str = ""
    _last_fetch_data = []
    _recursive_discovery_urls: set = set()
    _recursive_discovery_cloned: set = set()
    _submodule_discovery_history: list = []

    def __init__(self, path: str):
        log.info(f"Initializing repository at {path}")
        self._path = str(path)
        # re-bind per instance so the class-level defaults stay untouched
        self._recursive_discovery_cloned = set()
        self._recursive_discovery_urls = set()
        self._submodule_discovery_history = []
        try:
            self._repo = Repo(path, expand_vars = False)
            self._initialized = True
            self._bare = self._repo.bare

        except (NoSuchPathError, InvalidGitRepositoryError) as e:
            log.warning(f"Init failed: {e}, continuing with uninitialized repo")
            self._initialized = False
            self._bare = False

    @property
    def initialized(self) -> bool:
        return self._initialized

    @property
    def bare(self) -> bool:
        return self._bare

    @property
    def path(self) -> str:
        return self._path

    @property
    def cloned_submodules_url_list(self) -> list:
        return list(self._recursive_discovery_cloned)

    @property
    def discovered_submodules_commits(self) -> list:
        return self._submodule_discovery_history

    def _persist_submodule_commits(self, path: str):
        with open(path, "w") as f:
            for commit in self.discovered_submodules_commits:
                f.write(f"{commit}\n")

    def __check_initialized(self):
        # Decorator defined inside the class body: at decoration time `self`
        # is the wrapped function, not a RepoTool instance.
        def inner(*args):
            fake_self: RepoTool = args[0]
            if not fake_self._initialized:
                log.critical(f"Repo {fake_self.path} is not initialized!")
                return False
            return self(*args)

        return inner
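
    # Conceptually, each guarded method below behaves like this hypothetical
    # hand-written equivalent (sketch only, not part of the class):
    #
    #   def fetch(self) -> bool:
    #       if not self._initialized:
    #           log.critical(f"Repo {self.path} is not initialized!")
    #           return False
    #       ...  # real method body
    #
    # i.e. calling any guarded method on an uninitialized RepoTool returns
    # False instead of raising.
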
    def clone(self, url: str) -> bool:
        if self._initialized:
            log.warning("Trying to clone into an already initialized repository!")
            return False

        log.info(f"Cloning repository from url: {url}")
        try:
            self._repo = Repo.clone_from(
                url,
                to_path = self._path,
                progress = GitRemoteProgress(),
                bare = True,
                mirror = True
            )
        except GitCommandError as e:
            log.critical(f"Clone of {url} failed!")
            log.critical(f"Exception: {e}")
            self._initialized = False
            return False

        self._initialized = True
        self._bare = self._repo.bare

        return True

    @__check_initialized
    def fetch(self) -> bool:
        log.info("Fetching repo :)")
        if not len(self._repo.remotes):
            log.warning(f"Repo: {self._path} does not contain any remotes!")
            return False
        # fetch the first remote (a mirror clone has exactly one)
        remote = self._repo.remotes[0]
        log.debug(f"Fetching remote: {remote.name} url: {next(remote.urls)}")
        self._last_fetch_data = remote.fetch(
            # forced refspecs keep local heads and tags an exact mirror of the remote
            ["+refs/heads/*:refs/heads/*", "+refs/tags/*:refs/tags/*"],
            progress = GitRemoteProgress(),
            kill_after_timeout = 600,
            prune = True
        )
        log.debug("Fetch finished!")
        return True

    @__check_initialized
    def repo_fingerprint(self) -> Union[str, bool]:
        log.debug("Getting repo fingerprint")
        # reference count
        ref_count = self._repo.git.rev_list(count = True, all = True)
        tags = [f"{tag.name}/{tag.commit}" for tag in self._repo.tags]
        branches = [f"{branch.name}/{branch.commit}" for branch in self._repo.branches]
        log.debug(f"{ref_count} references, {len(tags)} tags, {len(branches)} branches")
        cumulative = f"{ref_count} {'.'.join(tags)} {' '.join(branches)}".encode()
        fingerprint = hashlib.sha256(cumulative).hexdigest()
        log.debug(f"Repo fingerprint is {fingerprint}")
        return fingerprint
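
    # Example of the intended use (hypothetical digests, shortened for brevity):
    #
    #   before = tool.repo_fingerprint()   # e.g. "9f2c51..."
    #   tool.fetch()
    #   after = tool.repo_fingerprint()    # differs only if refs changed
    #   if before != after:
    #       ...  # something new was fetched
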
    @__check_initialized
    def list_commits(self, max_depth: Optional[int] = None):
        return self._repo.iter_commits(all = True, max_count = max_depth, reverse = True)

    @__check_initialized
    def list_branches(self) -> Union[dict, bool]:
        log.debug("Listing branches of repo")
        branches = {}
        for branch in self._repo.branches:
            branches[branch.name] = branch.commit.hexsha
        log.debug(f"Found {len(branches)} branches")
        return branches

    @__check_initialized
    def list_tags(self):
        log.debug("Listing tags of repo")
        tags = {}
        for tag in self._repo.tags:
            tags[tag.name] = tag.commit.hexsha
        log.debug(f"Found {len(tags)} tags")
        return tags

    @__check_initialized
    def list_submodules(self, commit: str = "HEAD") -> Union[list, bool]:
        commit = self._repo.commit(commit)
        submodules = []

        if '.gitmodules' in commit.tree:
            git_modules: git.Blob = commit.tree['.gitmodules']
            # GitConfigParser wants a named, file-like object
            blob_stream = BytesIO(git_modules.data_stream.read())
            blob_stream.name = git_modules.name

            try:
                parser = git.GitConfigParser(blob_stream, read_only = True)
                for section in parser.sections():
                    if parser.has_option(section, "url"):
                        submodules.append(parser.get_value(section, "url"))
            except Exception:
                # malformed .gitmodules -> report failure to the caller
                return False

        if commit.hexsha not in self._submodule_discovery_history:
            self._submodule_discovery_history.append(commit.hexsha)

        return submodules
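
    # For reference, a .gitmodules blob this parser accepts looks like
    # (illustrative content):
    #
    #   [submodule "libs/foo"]
    #       path = libs/foo
    #       url = https://example.invalid/libs/foo.git
    #
    # Only the url values are collected; paths and names are ignored.
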
    @__check_initialized
    def list_submodules_history(self, limit_of_commits: Optional[int] = None) -> Union[list, bool]:
        log.info("Listing repository submodule history")
        iterator = self.list_commits(limit_of_commits)
        submodules = set()
        counter: int = 0
        last_status = time.time()
        status_offset = 0.5
        for commit in iterator:
            counter += 1
            commit_submodules = self.list_submodules(commit)
            if not isinstance(commit_submodules, bool):
                submodules.update(commit_submodules)
            if time.time() >= last_status + status_offset:
                log.info(f"Submodule discovery: {counter} commits finished, {len(submodules)} discovered")
                last_status = time.time()
        return list(submodules)

    def clone_recursive(
            self,
            main_url: str,
            scan_cache_dir: Optional[str] = None,
            scan_depth: Optional[int] = None
    ) -> bool:
        log.info(f"Started recursive clone of {main_url} with recursive discovery limited to {scan_depth} commits")
        # clone main repo
        if not self.clone(main_url):
            log.critical("Clone of main repository failed!")
            return False

        # discover submodules for repository
        submodules = self.list_submodules_history(scan_depth)
        if submodules:
            for submodule in submodules:
                self._recursive_discovery_urls.add(submodule)

        everything_succeed: bool = True
        everything_cloned: bool = False
        while not everything_cloned:
            # recursively scan and clone repositories
            everything_cloned = True
            # for every url in list
            # list() is needed - RuntimeError for set() changed during iteration
            for url in list(self._recursive_discovery_urls):
                if url not in self._recursive_discovery_cloned:
                    everything_cloned = False
                    # generate new path
                    directory = os.path.dirname(self.path)
                    submodule_cloner = RepoTool(os.path.join(directory, gen_repo_hashed_name(url)))
                    # clone
                    cloned: bool = submodule_cloner.clone(url)
                    # mark as cloned even on failure, so the while loop cannot get stuck
                    self._recursive_discovery_cloned.add(url)
                    if not cloned:
                        log.critical(f"Clone of submodule: {url} failed")
                        everything_succeed = False
                        continue
                    # scan for submodules
                    submodules = submodule_cloner.list_submodules_history(scan_depth)
                    if submodules is False:
                        log.critical(f"Submodule discovery for {url} failed!")
                        everything_succeed = False
                        continue

                    # persist discovered commits for this submodule
                    if scan_cache_dir:
                        cache_file = os.path.join(scan_cache_dir, gen_repo_hashed_name(url))
                        log.debug(
                            f"Saving {len(submodule_cloner.discovered_submodules_commits)} commits into {cache_file}")
                        submodule_cloner._persist_submodule_commits(cache_file)

                    for submodule in submodules:
                        self._recursive_discovery_urls.add(submodule)

        if scan_cache_dir:
            # persist main repo commits
            cache_file = os.path.basename(self.path)
            cache_file = os.path.join(scan_cache_dir, cache_file)
            self._persist_submodule_commits(cache_file)

            # persist discovered submodule urls
            with open(os.path.join(scan_cache_dir, "submodules.cache"), "w") as f:
                f.write("\n".join(self.cloned_submodules_url_list))

        return everything_succeed
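
# A minimal end-to-end usage sketch (paths and URL are illustrative
# placeholders, not part of this module):
#
#   tool = RepoTool("/tmp/mirrors/project.git")
#   if not tool.initialized:
#       ok = tool.clone_recursive(
#           "https://example.invalid/group/project.git",
#           scan_cache_dir = "/tmp/mirrors/cache",
#           scan_depth = 500,
#       )
#   else:
#       ok = tool.fetch()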