import logging from repo_cloner.lib import Detector, DetectedCommit, DiskStoredList, DiskStoredRefs import pytest from unittest.mock import patch, MagicMock, PropertyMock from git import Actor from cloner_test_fixtures import cloner_dir_struct from collections import namedtuple import json @pytest.fixture def detector_obj(cloner_dir_struct): return Detector( cloner_dir_struct.joinpath("repos", "repo.git"), cloner_dir_struct.joinpath("cache"), "Mocked Project" ) some_commits = [ "d766d9c302463257695a4d53d857a2cecd024414", "a2e308bb9c5e59e7a0c319dca0adf3966f3f3a60", "36021088abb6ebcb2202897f3f27b26b21c25068", "e811211f0895e9e792f569b1b38f8452e0efe42f", "6854c793fd2cbd848650a94b9fed4924e5e428b2", "2f08263b5aff30ce776bb0350503be69d45647da", "fc53c9d8bc9e01c63632376df51abb67ca344a89", "c95a862af3d13623da3d7a6b73ff79d68ff2d7ad", "b21c1e9e329bcdef1980462cf62c09a9e807dafd", "dec7ff27d658412f0088c4c3220990f1f1dc98f1", "ad66f820c9dfa06c3767f81cb42b1398fa61ff05", "b9af12b9f80b372c645b5f64c5a8f8b6a148ccfc", "1fda6025ed303ecf03b9afa7cce882aaddc058f9", "d33585f0a72d9de6c481891787e4d956b769ccb5", "7f685afca71731cb9276f937fc8cceca6b47b1f2", "af112317aa2459e88db9cb10533ea6f29818afee", "ddc24a34983ff43b61e1bdf27b7ec2357932e7e0", "7ccca22ba367433eedb226310373fd250a2af725", "e4843c6d2391d0b45beea81e0dd34078484b474f", "fe942d46c33fee0e68d2e0ed64dfa6515dbf89c4", "1d80638229e0a7ef62b3b9cdab9ad9f63acf6d38", "f968ea00a3007034ddf196285d7b60fec5e7fcf0", "490859dbebe02af6bd643a89b6897961674f90fd", "066de679c906a78f7f817ee9a48d3021fd8b6b7c", "2a8277687fb6dee742e6e0193ea95fcd2264fbc4", "8808c649a8b0a5279a2d9a46b730254cad649205", "ad5db175fab144a8851b712195733c4a9c00d699", "36d45a4c1e5ad8c47a14881c5427045c3de095d0", "6b0b9affedb3e3daa4df00cd54678d78ea1c1d94", "aa5056610ff57f73bae9633a985c6a8e41f3bc23", ] branch_payload = [ ("doctrine", "fb96c1445ac20f224bbc6c32fdce145fb91af33d"), ("gitlab", "26538d26f9c377b2e81cd001205bf85a8241b8be"), ("limonade", "6c77dc4fdff0438b6badc7344e45cd0f03f5c872"), ("master", "aa5056610ff57f73bae9633a985c6a8e41f3bc23"), ("xlsshop", "5f59a880b4db820109f9f0420e66f7047322b12b"), ] tag_payload = [ ("v582", "0915ef3cb1890450c66ea4104c6b4ee021e1a0d3"), ("v584", "70508264e064bd1130a32ad8b66095f75d565b3c"), ("v585", "70508264e064bd1130a32ad8b66095f75d565b3c"), ("v587", "a7bdbe986402921b67443a4748f3b0f56f4aa19d"), ("v588", "6ed749cf30f25b2b08f94970dcb598af273ca0cb"), ("v589", "78d810634e0925e29c33ac203fa4997baeabf533"), ("v590", "01abecc6a90f2257878f6929ec495720164d9e3d"), ("v595", "7c7db8c441f1e4864cc7396cf633dc45ef4b2894"), ("v597", "21a829cf4479c0482fe5ef815a2d3833cc39bc76"), ("v601", "dd470e1072ebc79a5c4b3f1faa89885e38c0134f"), ("test-testu", "729bbbccdb7a92abb4c9aa36b8a6ba18dc25d9b3"), ("v1001", "02ea4a101a6fc0558ddceca03ff1241abcf3c338"), ("v1003", "50c64c32e65a54db8403362b5bcd3a336990bc57"), ("v1004", "e9e87bb8cb1805835f7622ef53b07909ba6ac02c"), ("v1005", "c1ea58e1773a9e7c81a335e89f4b59739b07b672"), ("v1006", "fde748fdad0df54d027e3243f4ae504cbdb137f0"), ("v1007", "739737e5fd2cb8d3e2973fb6077dc97ca4e08483"), ] def test_init(cloner_dir_struct): with patch("repo_cloner.lib.Detector.check_legacy_config", autospec = True) as p: det = Detector( cloner_dir_struct.joinpath("repos", "repo.git").as_posix(), cloner_dir_struct.joinpath("cache").as_posix(), "Mocked Project" ) assert p.called assert cloner_dir_struct.joinpath("cache", "detector").exists() assert cloner_dir_struct.joinpath("cache", "detector").is_dir() assert len(det._executed) == 0 # test loading of content cloner_dir_struct.joinpath("cache", "detector", "detectorExecuted").write_text( "commit1\ncommit2\ncommit3\n") det = Detector( cloner_dir_struct.joinpath("repos", "repo.git").as_posix(), cloner_dir_struct.joinpath("cache").as_posix(), "Mocked Project" ) assert 3 == len(det._executed) assert not det._repo.initialized assert det._repo_path == cloner_dir_struct.joinpath("repos", "repo.git") assert det._branches.count() == 0 assert det._tags.count() == 0 def test_ref_dir_to_json(cloner_dir_struct, caplog): caplog.set_level(0) with patch("repo_cloner.lib.Detector.check_legacy_config", autospec = True) as p: det = Detector( cloner_dir_struct.joinpath("repos", "repo.git").as_posix(), cloner_dir_struct.joinpath("cache").as_posix(), "Mocked Project" ) assert p.called dir = cloner_dir_struct.joinpath("tst") dir.mkdir() dir.joinpath("messed-up-dir").mkdir(parents = True) for branch, commit in branch_payload: dir.joinpath(branch).write_text(f"{commit}\n") Detector._ref_dir_to_json(dir) assert dir.is_file() assert dir.read_text() == \ '[["doctrine", "fb96c1445ac20f224bbc6c32fdce145fb91af33d"], ["gitlab", ' \ '"26538d26f9c377b2e81cd001205bf85a8241b8be"], ["limonade", ' \ '"6c77dc4fdff0438b6badc7344e45cd0f03f5c872"], ["master", ' \ '"aa5056610ff57f73bae9633a985c6a8e41f3bc23"], ["xlsshop", ' \ '"5f59a880b4db820109f9f0420e66f7047322b12b"]]' def test_check_legacy_config(detector_obj): branch_dir = detector_obj._detector_dir.joinpath("branches") tag_dir = detector_obj._detector_dir.joinpath("tags") branch_dir.mkdir() tag_dir.mkdir() for branch, commit in branch_payload: branch_dir.joinpath(branch).write_text(f"{commit}\n") for tag, commit in tag_payload: tag_dir.joinpath(tag).write_text(f"{commit}\n") detector_obj.check_legacy_config() assert branch_dir.is_file() assert tag_dir.is_file() branches = json.loads(branch_dir.read_text()) tags = json.loads(tag_dir.read_text()) assert dict(branches) == dict(branch_payload) assert dict(tags) == dict(tag_payload) def test_initialize_caches(tmp_path): commits = [commit for commit in some_commits] [commits.append(commit) for _, commit in branch_payload] [commits.append(commit) for _, commit in tag_payload] Commit = namedtuple("Commit", ["hexsha"]) commits.sort() commits_named = [Commit(commit) for commit in commits] branches = {} tags = {} for key, value in branch_payload: branches[key] = value for key, value in tag_payload: tags[key] = value branches = dict(sorted(branches.items())) tags = dict(sorted(tags.items())) mocks = { 'list_commits': MagicMock(return_value = commits_named), 'list_branches': MagicMock(return_value = branches), 'list_tags': MagicMock(return_value = tags), } repo = tmp_path.joinpath("repo.git") cache_dir = tmp_path.joinpath("cache") cache_dir.mkdir() with patch.multiple("repo_cloner.lib.RepoTool", **mocks): from repo_cloner.lib import RepoTool det = Detector(repo, cache_dir, "Mocked Project") det._tags._DiskStoredRefs__refs = {"tag1": "commit"} det._branches._DiskStoredRefs__refs = {"branch1": "commit"} det.initialize_caches() assert RepoTool.list_commits.called assert RepoTool.list_branches.called assert RepoTool.list_tags.called # load files cache_dir = cache_dir.joinpath("detector") assert cache_dir.joinpath("detectorExecuted").read_text() == "\n".join(commits) + "\n" assert dict(json.loads(cache_dir.joinpath("branches").read_text())) == branches assert dict(json.loads(cache_dir.joinpath("tags").read_text())) == tags def test_check_fingerprint(tmp_path): mocks = { 'repo_fingerprint': MagicMock(return_value = "FingerPrint"), } repo = tmp_path.joinpath("repo.git") cache_dir = tmp_path.joinpath("cache") cache_dir.mkdir() fp_file = cache_dir.joinpath("detectorSum") with patch.multiple("repo_cloner.lib.RepoTool", **mocks): det = Detector(repo, cache_dir, "Mocked") # file does not exist assert det.check_fingerprint() fp_file.touch() assert det.check_fingerprint() fp_file.write_text("FingerPrint") assert not det.check_fingerprint() def test_persist_fingerprint(tmp_path): mocks = { 'repo_fingerprint': MagicMock(return_value = "FingerPrint"), } repo = tmp_path.joinpath("repo.git") cache_dir = tmp_path.joinpath("cache") cache_dir.mkdir() fp_file = cache_dir.joinpath("detectorSum") with patch.multiple("repo_cloner.lib.RepoTool", **mocks): det = Detector(repo, cache_dir, "Mocked") det.persist_fingerprint() assert fp_file.read_text().strip() == "FingerPrint" def test_run(tmp_path, caplog): stamp = 1659533160 executed = [] def time(): nonlocal stamp stamp += 1 return stamp def exec(env: DetectedCommit): nonlocal executed executed.append(env) # commits - new version would have 10 more commits commits_old = [commit for commit in some_commits[0:25]] commits_new = [commit for commit in some_commits] # branches # - removed branch doctrine # - master targets 36d45a4c1e5ad8c47a14881c5427045c3de095d0 (not cached) # - new branch dupla targeting 7f685afca71731cb9276f937fc8cceca6b47b1f2 branches_old = {} branches_new = {} for key, value in branch_payload: branches_old[key] = value if key == "doctrine": continue if key == "master": branches_new[key] = "36d45a4c1e5ad8c47a14881c5427045c3de095d0" continue branches_new[key] = value branches_new['dupla'] = "7f685afca71731cb9276f937fc8cceca6b47b1f2" # tags # - removed test testu # - v1001 and v1003 point to different commit # - new tag super pointing at 7f685afca71731cb9276f937fc8cceca6b47b1f2 tags_old = {} tags_new = {} for key, value in tag_payload: tags_old[key] = value if key == "test-testu": continue if key == "v1001": tags_new[key] = "d766d9c302463257695a4d53d857a2cecd024414" continue if key == "v1003": tags_new[key] = "a2e308bb9c5e59e7a0c319dca0adf3966f3f3a60" continue tags_new[key] = value tags_new['super'] = "7f685afca71731cb9276f937fc8cceca6b47b1f2" tags_new['v1003.3'] = "a2e308bb9c5e59e7a0c319dca0adf3966f3f3a60" # new commits for _, commit in branches_new.items(): if commit not in commits_new: commits_new.append(commit) for _, commit in tags_new.items(): if commit not in commits_new: commits_new.append(commit) # storage directory cache_dir = tmp_path.joinpath("cache") detector_dir = cache_dir.joinpath("detector") cache_dir.mkdir() detector_dir.mkdir() # sort commits_old.sort() commits_new.sort() # prepare files # detectorExecuted detector_dir.joinpath("detectorExecuted").write_text("\n".join(commits_old) + "\n") # branches detector_dir.joinpath("branches").write_text(json.dumps(dict(sorted(branches_old.items())))) # tags detector_dir.joinpath("tags").write_text(json.dumps(dict(sorted(tags_old.items())))) Commit = namedtuple("Commit", ["hexsha", "author", "authored_date", "message"]) author = Actor("Tester", "tester@email.me") commits_named_new = [Commit(commit, author, authored_date = time(), message = "Test commit") for commit in commits_new] mocks = { 'list_commits': MagicMock(return_value = commits_named_new), 'list_branches': MagicMock(return_value = branches_new), 'list_tags': MagicMock(return_value = tags_new), 'repo_fingerprint': MagicMock(return_value = "some-fingerprint"), } repo = tmp_path.joinpath("repo.git") caplog.set_level(logging.DEBUG) with patch.multiple("repo_cloner.lib.RepoTool", **mocks): from repo_cloner.lib import RepoTool det = Detector(repo, cache_dir, "Mocked Project") det.run(exec) assert RepoTool.list_commits.called assert RepoTool.list_branches.called assert RepoTool.list_tags.called assert "Branch doctrine removed in source, removing from detector" in caplog.text assert "Tag test-testu removed in source, removing from detector" in caplog.text """ for x in executed: print(x.dict) """ assert len(executed) == 24 assert sum(ex.is_tag for ex in executed) == 3 assert sum(ex.is_branch for ex in executed) == 2 assert any(ex.branches == 'dupla' and ex.tags == 'super' for ex in executed) assert any(ex.tags == 'v1003, v1003.3' for ex in executed) assert any(ex.tags == 'v1001' for ex in executed) assert any(ex.branches == 'master' for ex in executed) assert all(ex.commit not in commits_old or ex.is_tag for ex in executed) # new commits persisted? commits = DiskStoredList(cache_dir.joinpath("detector", "detectorExecuted").as_posix()) for commit in commits_new: assert commit in commits refs = DiskStoredRefs(cache_dir.joinpath("detector", "tags")) for tag, commit in tags_new.items(): assert commit == refs.get(tag)