move old files to old directory

Signed-off-by: Václav Valíček <valicek1994@gmail.com>
This commit is contained in:
2022-05-04 09:46:41 +02:00
parent 3d12f83d26
commit b536c883de
28 changed files with 0 additions and 0 deletions

1
old/.dockerignore Normal file
View File

@@ -0,0 +1 @@
# keep the creator helper directory out of the docker build context
creator/*

43
old/Dockerfile Normal file
View File

@@ -0,0 +1,43 @@
# Debian base image
FROM registry.sw3.cz/valicek1/lxcbian-bullseye
# persistent project data (mirrors, configs) lives on this volume
VOLUME /data
# install dependencies
RUN apt-get update
RUN apt-get full-upgrade -y --no-install-recommends
RUN apt-get install -y --no-install-recommends \
git bash openssh-server parallel vim cron npm nodejs logrotate
RUN apt-get clean
# create executor user
RUN useradd --uid 10000 -ms /bin/bash executor
# install crontab, enable cron
ADD crontab /etc/cron.d/repo-cloner-executor
# sanity check: fail the build early if cron did not get installed
RUN bash -c "dpkg -l | grep cron"
RUN systemctl enable ssh cron
# all helper scripts are flattened into /usr/local/bin
ADD dockerbin/* checker/* src/* /usr/local/bin/
# remove tty autospawn from inittab
# NOTE(review): no command follows the comment above - confirm whether the
# inittab tweak was dropped intentionally
# disable motd
RUN echo > /etc/motd
# install tail logger
RUN npm i frontail -g
# enable frontail at startup
ADD startup-logcat /usr/local/bin
ADD logcat.service /etc/systemd/system
RUN systemctl enable logcat
# enable log dumping & rotation
ADD startup-logdump /usr/local/bin
ADD syslog.logrotate /etc/logrotate.d/syslog
ADD logdump.service /etc/systemd/system
RUN systemctl enable logdump
# container runs in Prague local time
RUN ln -fs /usr/share/zoneinfo/Europe/Prague /etc/localtime
# systemd as PID 1 (lxc-style full-system container)
CMD [ "/sbin/init" ]

27
old/Makefile Normal file
View File

@@ -0,0 +1,27 @@
# Build/run helpers for the repo-cloner Docker image.
# Every target here is a command, not a file - declare them phony so a
# stray file named e.g. "cloner" can never shadow a target.
.PHONY: default cloner run once bash wizzard singleshot

default: cloner

# name of the shared data volume; override: make GLOBALVOL=myvol run
GLOBALVOL ?= cloner-global
# image tag pieces (simple := expansion - values are static)
TAGOWNER := valicek1
TAGMAIN := repo-cloner
TAGBASE := $(TAGOWNER)/$(TAGMAIN)

# build the image
cloner:
	docker build -t $(TAGBASE) .

# run the full container (systemd init) with the data volume mounted
run: cloner
	docker run -v $(GLOBALVOL):/data -it --rm $(TAGBASE)

# run one cron iteration and exit
once: cloner
	docker run -v $(GLOBALVOL):/data -it --rm $(TAGBASE) /usr/local/bin/cron-command

# interactive shell inside the image
bash: cloner
	docker run -v $(GLOBALVOL):/data -it --rm $(TAGBASE) /bin/bash

# interactive project-setup wizzard
wizzard: cloner
	docker run -v $(GLOBALVOL):/data -it --rm $(TAGBASE) /usr/local/bin/wizzard

# run the checker once against a fixed test project, as the executor user
singleshot: cloner
	docker run -v $(GLOBALVOL):/data -e BASE=/data/cloner-t456 --user executor -it --rm $(TAGBASE) run-checker

View File

@@ -0,0 +1,18 @@
#!/bin/bash
# library made for sourcing - just to prepare git auth environment
function prepareGitAuth(){
    # usage
    # $1 - config directory
    confdir=$1/auth
    # mkdir -p creates the whole chain and is a no-op when the directory
    # already exists; quoting keeps paths containing whitespace intact
    [ -d "$confdir" ] || mkdir -p "$confdir"
    [ -d "$confdir/ssh" ] || mkdir -p "$confdir/ssh"
    # keep stored credentials private to the executor user
    chmod 0700 "$confdir"
    # git configure http authenticator
    git config --global credential.helper "store --file=$confdir/git-credentials"
    # git configure ssh auth (non-interactive: no host key prompts, batch mode)
    git config --global core.sshcommand "ssh -i $confdir/ssh/identity -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -q"
}

View File

@@ -0,0 +1,11 @@
# library for work with config file
# (sourced by run-checker; requires $CONFIG_DIR and die() to be in scope)
# check directories if they exist
[ -d "$CONFIG_DIR" ] || die "Config directory does not exist on volume - $CONFIG_DIR"
[ -f "$CONFIG_DIR/cloner.cfg" ] || die "Config file does not exist - create please cloner.cfg"
# load config files
source "$CONFIG_DIR/cloner.cfg"
# check if the url is specified; trailing "|| true" keeps set -e happy
# when the url IS present (the && chain then evaluates false)
repo=${cloner_repo_url:-}
[ -z "$repo" ] && die "No repository url is specified" || true

View File

@@ -0,0 +1,11 @@
#!/bin/bash
# Shared configuration library (sourced, not executed).
# Derives every working path from $BASE and provides die().

# layout of the per-project volume
export CCLONE_PATH="$BASE/repos"
export CCLONE_CACHE="$BASE/cache"
export CONFIG_DIR="$BASE/config"

# die MESSAGE... - print the message to stderr and abort with status 1
die() {
    echo "$@" >&2
    exit 1
}

View File

@@ -0,0 +1,29 @@
#!/bin/bash
# Detector configuration helpers (sourced by run-checker).
# Requires CONFIG_DIR / CCLONE_CACHE and die() to already be in scope.
[ -n "$CONFIG_DIR" ] || die "ConfigDir is not set - exit!"

# True (status 0) when detector.cfg exists, i.e. CI detection is enabled.
function detectorRunCapable(){
    [ -f "$CONFIG_DIR/detector.cfg" ]
}

# Source detector.cfg and make sure the tag/branch cache dirs exist.
function detectorLoadConfig(){
    # load config
    source "$CONFIG_DIR/detector.cfg"
    # cache layout under the per-project cache directory
    DET_DIR=$CCLONE_CACHE/detector
    DET_TAGS=$DET_DIR/tags
    DET_BRANCHES=$DET_DIR/branches
    # -p makes both calls safe whether or not the dirs already exist
    mkdir -p "$DET_TAGS" "$DET_BRANCHES"
}

View File

@@ -0,0 +1,33 @@
#!/bin/bash
# Commit-history bookkeeping for the detector (sourced by run-checker).
# All helpers share one state file, $DET_DIR/detectorExecuted: the list
# of commit hashes that were already processed (notified).

# Seed the history with every commit already present in the mirror so
# that pre-existing commits never trigger notifications.
function detectorTryInit(){
    local repodir=$1
    local seen=$DET_DIR/detectorExecuted
    if [ ! -f "$seen" ]
    then
        echo "Initializing detector cache"
        # initialize seed
        git --git-dir "$repodir" log --all --format="%H" > "$seen"
    fi
}

# Status 0 when commit $1 has NOT been processed yet, 1 when it has.
function detectorCheckCommit(){
    local seen=$DET_DIR/detectorExecuted
    [ -f "$seen" ] || touch "$seen"
    ! grep -q "$1" "$seen"
}

# Record commit hash $1 as processed.
function detectorSaveCommit(){
    echo "$1" >> "$DET_DIR/detectorExecuted"
}

View File

@@ -0,0 +1,51 @@
#!/bin/bash
# FETCH_HEAD fingerprinting and branch/tag listing helpers
# (sourced by run-checker).

# Print an md5 checksum of the repo's FETCH_HEAD (created empty when
# missing). FETCH_HEAD changes whenever `git fetch` brings in new refs,
# so the checksum is a cheap "did anything change" fingerprint.
function detectorSum(){
    local dir=$1
    [ -f "$dir/FETCH_HEAD" ] || touch "$dir/FETCH_HEAD"
    # use md5sum - it is in busybox; read the file directly (no cat needed)
    md5sum < "$dir/FETCH_HEAD" | cut -f1 -d' '
}

# Persist the current checksum for the next run to compare against.
function detectorSumPersist(){
    detectorSum "$1" > "$CCLONE_CACHE/detectorSum"
}

# Status 0 when FETCH_HEAD is unchanged since the persisted checksum,
# 1 when it differs (new activity was fetched).
function detectorCheckFetchHead(){
    local dir=$1
    [ -f "$CCLONE_CACHE/detectorSum" ] || touch "$CCLONE_CACHE/detectorSum"
    local newSum oldSum
    newSum=$(detectorSum "$dir")
    oldSum=$(cat "$CCLONE_CACHE/detectorSum")
    [ "x$oldSum" = "x$newSum" ]
}

# List all tags of the bare repo, one per line.
function gitListTags(){
    local dir=$1
    git --git-dir="$dir" tag -l | cat
}

# List all local branches, one per line.
function gitListBranches(){
    local dir=$1
    git --git-dir="$dir" for-each-ref --format='%(refname:short)' refs/heads/
}

# List branches with master/upstream first so their commits get notified
# before the same commits show up on other branches.
# (fix: consistently use the declared $dir instead of mixing in $1)
function gitPrefBranches(){
    local dir=$1
    gitListBranches "$dir" | grep master || true
    gitListBranches "$dir" | grep upstream || true
    gitListBranches "$dir" | sort | grep -vE 'master|upstream'
}

14
old/checker/notify-commit Executable file
View File

@@ -0,0 +1,14 @@
#!/bin/bash
# notify on commit push
#
# Parameters (via env)
#
# COMMIT - hash of commit
# COMMIT_AUTHOR - author of commit
# COMMIT_ABBREV - abbreviated sha
# COMMIT_LOG - short line of commit log
# COMMIT_BRANCH - name of branch
# PROJECT_NAME - project name specified in cloner.cfg
#
# This stub only logs the event; replace it with a real CI trigger.
printf '%s\n' "Commit: ($COMMIT: (AUTHOR=$COMMIT_AUTHOR; BRANCH=$COMMIT_BRANCH), Project: $PROJECT_NAME)"
# %b interprets backslash escapes in the message, matching `echo -e`
printf '\t%b\n' "$COMMIT_ABBREV: $COMMIT_LOG"

14
old/checker/notify-tag Executable file
View File

@@ -0,0 +1,14 @@
#!/bin/bash
# (fix: shebang was "#/bin/bash" - missing "!", so the kernel never
# recognized it and the script ran under whatever shell spawned it)
# notify on tag push (new tag or change)
#
# Parameters (via env)
#
# TAG_HASH - hash of tagged commit
# TAG_NAME - tag label
# TAG_AUTHOR - who authored the tag - if available
# TAG_ABBREV - commit abbreviated hash
# TAG_LOG - short status line of log
# PROJECT_NAME - name of project specified in cloner.cfg
#
# This stub only logs the event; replace it with a real CI trigger.
echo "TAG: (Name=$TAG_NAME; AUTHOR=$TAG_AUTHOR; SHA: $TAG_HASH), PROJECT: $PROJECT_NAME"
echo -e "\t$TAG_ABBREV: $TAG_LOG"

176
old/checker/run-checker Executable file
View File

@@ -0,0 +1,176 @@
#!/bin/bash
# run-checker - update the mirror of one project and, when the detector
# is enabled, fire notify-commit / notify-tag for every new ref.
# Expects $BASE to point at the project directory (/data/cloner-<name>);
# the sourced libs derive CCLONE_CACHE / CONFIG_DIR / $repo from it.
set -euo pipefail
IFS=$'\n\t'
# source libs
mydir=$(dirname $(realpath $0))
source $mydir/cloner-lib-general
source $mydir/cloner-lib-auth
source $mydir/cloner-lib-cfg
source $mydir/detector-lib-cfg
# interval - in minutes
interval=${cloner_interval:-0}
stampfile=$CCLONE_CACHE/last-check-time
# does it exist - if not, sync
[ -d $CCLONE_CACHE ] || mkdir $CCLONE_CACHE
[ -f $stampfile ] || echo 0 > $stampfile
# rate limit: skip this run while the last sync is younger than the interval
now=$(date +"%s")
last=$(cat $stampfile)
diff=$(($now - $last))
mindiff=$(($interval * 60))
unset now last
if [ $diff -lt $mindiff ]
then
echo "Limit not reached - not syncing now"
exit 0
fi
# check and clone repo
submodules=${cloner_submodules:-0}
depth=${cloner_submodule_depth:-}
# relocating HOME keeps git's global config (auth helpers) inside the cache
export HOME=$CCLONE_CACHE
prepareGitAuth $CONFIG_DIR
# without submodule support
if [ ! "x$submodules" = "x1" ]
then
mirror-main-repo $repo
else
mirror-recursive $repo $depth
fi
date +"%s" > $stampfile
# if detector is not enabled, quit quietly
if ! detectorRunCapable
then
exit 0
fi
source $mydir/detector-lib-git
source $mydir/detector-lib-general
detectorLoadConfig
repodir=$(gen-mirror-path $repo)
if detectorCheckFetchHead $repodir
then
# nothing changed, just die
exit 0
fi
# try to init cache
detectorTryInit $repodir
# first, solve commits
# branches that were deleted or merged
find $DET_BRANCHES -type f | sort | while read branchpath
do
branch=$(basename $branchpath)
if ! gitListBranches $repodir | grep -q "^$branch$"
then
echo "Unexistent branch <$branch>!!"
# rm it, should be merged etc...
rm $DET_BRANCHES/$branch
fi
done
# new branches or new commits in current branches
# firstly list master branch, to make first commits in it
gitPrefBranches $repodir | while read branch
do
[ -f $DET_BRANCHES/$branch ] || touch $DET_BRANCHES/$branch
oldsha=$(cat $DET_BRANCHES/$branch)
newsha=$(git --git-dir $repodir show-ref --heads $branch | cut -d' ' -f1)
# turn the stored sha into a range prefix "sha.." (empty = whole history)
[ -z "$oldsha" ] || oldsha=$oldsha..
# walk through every commit in branch (since last change)
for commitId in $(git --no-pager --git-dir $repodir log --reverse --format="%H" $oldsha$branch)
do
if detectorCheckCommit $commitId
then
# commit was not processed - start
# COMMIT = $commitId
# COMMIT_AUTHOR = $author
author=$(git --git-dir $repodir log $commitId -1 --format="%an <%ae>")
# COMMIT_ABBREV
abbr=$(git --git-dir $repodir log $commitId -1 --format="%h")
# COMMIT_LOG
log=$(git --git-dir $repodir log $commitId -1 --format="%s")
# BRANCH = $branch
# PROJECT_NAME = $cloner_project_name
# a failing notify hook must not kill the whole run (set -e is active)
set +e
COMMIT="$commitId" \
COMMIT_AUTHOR="$author" \
COMMIT_BRANCH="$branch" \
COMMIT_ABBREV="$abbr" \
COMMIT_LOG="$log" \
PROJECT_NAME="$cloner_project_name" \
notify-commit
rc=$?
[ $rc -eq 0 ] || echo "Notify $branch/$commitId: return code = $rc"
set -e
detectorSaveCommit $commitId
fi
done
echo $newsha > $DET_BRANCHES/$branch
done
# solve tags - remove nonexistent refs
find $DET_TAGS -type f | sort | while read tagname
do
tag=$(basename $tagname)
if ! git --git-dir="$repodir" rev-parse "tags/$tag" > /dev/null 2>&1
then
echo "Removing tag: $tag (was [$(cat $tagname)])"
rm $tagname
fi
done
# tags that changed or were pushed as new
gitListTags $repodir | while read tagname
do
[ -f $DET_TAGS/$tagname ] || touch $DET_TAGS/$tagname
oldsha=$(cat $DET_TAGS/$tagname)
newsha=$(git --git-dir $repodir show-ref --tags $tagname | cut -d' ' -f1)
if ! [ "x$oldsha" = "x$newsha" ]
then
# TAG_HASH = $newsha
# TAG_NAME = $tagname
# TAG_AUTHOR
author=$(git --git-dir $repodir log $newsha -1 --pretty=format:"%an <%ae>")
# TAG_ABBREV
abbr=$(git --git-dir $repodir log $newsha -1 --format="%h")
# TAG_LOG
log=$(git --git-dir $repodir log $newsha -1 --format="%s")
# PROJECT_NAME = $cloner_project_name
# call the notify script
set +e
TAG_HASH="$newsha" \
TAG_NAME="$tagname" \
TAG_AUTHOR="$author" \
TAG_ABBREV="$abbr" \
TAG_LOG="$log" \
PROJECT_NAME="$cloner_project_name" notify-tag
rc=$?
[ $rc -eq 0 ] || echo "Notify $tagname: return code = $rc"
set -e
fi
echo $newsha > $DET_TAGS/$tagname
done
# save hash to keep things clear
detectorSumPersist $repodir

5
old/crontab Normal file
View File

@@ -0,0 +1,5 @@
# executor crontab
# installed as /etc/cron.d/repo-cloner-executor by the Dockerfile
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
MAILTO=""
# run the cloner every minute; run-checker rate-limits itself per project
* * * * * executor /usr/local/bin/cron-command >> /home/executor/cron.log 2>&1
# crude daily rotation of the ad-hoc cron log
0 0 * * * executor mv /home/executor/cron.log /home/executor/cron.old

44
old/dockerbin/cron-command Executable file
View File

@@ -0,0 +1,44 @@
#!/bin/bash
# cron-command - entry point fired by cron every minute.
# Re-executes itself as the executor user when started as root, then
# updates every enabled /data/cloner-* project in parallel.
set -euo pipefail
IFS=$'\n\t'
# try to include laminar env
if [ -f /etc/profile.d/laminar.sh ]
then
source /etc/profile.d/laminar.sh
fi
# if started as root
if [ $UID -eq 0 ]
then
# hand anything new on the volume to the executor user, then drop privileges
find /data \! -user executor -exec chown executor:executor {} \;
su executor -c cron-command
exit $?
fi
# check lock
lock=/var/run/cloner.pid
dir_prefix=cloner
# max parallel project updates (override via JOBS env var)
max_jobs=${JOBS:-3}
function die(){
echo $@ 1>&2
exit 1
}
# first, check process
if [ -f $lock ]
then
pid=$(cat $lock)
# if it still runs, die not so quietly
[ -n "$pid" ] && [ -d /proc/$pid ] && die "Another process running!"
fi
# else make some mess and setup trap
# NOTE(review): no trap is actually installed - the pid file is left in
# place and staleness is detected via the /proc/<pid> check above; confirm
# whether a cleanup trap was intended
echo $BASHPID > $lock
# one run-mirror-update job per project dir, at most $max_jobs at once
find /data -maxdepth 1 -type d -name "${dir_prefix}-*" | \
parallel --lb -j $max_jobs -n 1 run-mirror-update
echo "Cron Finished"

35
old/dockerbin/run-mirror-update Executable file
View File

@@ -0,0 +1,35 @@
#!/bin/bash
# run-mirror-update - update the mirror of one project directory
# (/data/cloner-<name>); invoked by cron-command via GNU parallel with
# the project path as $1.
set -euo pipefail
IFS=$'\n\t'

# log MESSAGE... - prefix the message with a timestamp and project tag.
function log(){
    # fix: $name is never set by this script, so under `set -u` the old
    # ${raw:-$name} aborted with "name: unbound variable" whenever die()
    # fired before $raw was assigned (e.g. "No project specified");
    # guard both expansions
    local title=${raw:-${name:-}}
    [ -z "$title" ] || title=" [$title]"
    echo "[$(date +"%X")]$title $@"
}

# die MESSAGE... - log to stderr and abort
function die(){
    log "$@" 1>&2
    exit 1
}

# temp dir, removed on any exit
scratch=$(mktemp -d -t tmp.XXXXXXXXXX)
function finish {
    rm -rf "$scratch"
}
trap finish EXIT

# necessary checks
pathto=${1:-}
[ -n "$pathto" ] || die "No project specified"
# project name without the "cloner-" prefix, used as the log tag
raw=$(basename $pathto | sed 's/^cloner-//g')
# is it enabled?
[ -f "$pathto/.enabled" ] || die "$raw not enabled!"
# run the checker and tag each of its output lines
env BASE=$pathto run-checker | while read line; do log "$line"; done

218
old/dockerbin/wizzard Executable file
View File

@@ -0,0 +1,218 @@
#!/bin/bash
# wizzard - interactive setup of a new cloner project under /data.
set -euo pipefail
IFS=$'\n\t'
# make the helper scripts reachable by bare name
if [[ ! ":$PATH:" == *":/usr/local/bin:"* ]]
then
export PATH="/usr/local/bin:$PATH"
fi
# if started as root
if [ $UID -eq 0 ]
then
# hand the volume to the executor user and re-run unprivileged
chown executor:executor /data
su executor -c $0
exit $?
fi
# every project lives in /data/<dir_prefix>-<name>
dir_prefix=cloner
# die MESSAGE... - print to stderr and abort
function die(){
echo $@ 1>&2
exit 1
}
function checkProjectName(){
    # check, if volume does not exist yet
    # $1 - proposed project name
    name=$1
    # should not be empty
    # (fix: validate the parameter, not the caller's global
    # $read_project_name - the function now works for any argument)
    [ -n "$name" ] || die "Empty project name is not allowed"
    if [ -d /data/$dir_prefix-$name ]
    then
        die "Target volume for project '$name' exists - please try again!"
    fi
}
function createConfigFile(){
# creates config file, writes it to $1 location
loc=$1
# vars
# snapshot the interactive answers into template variables
local tmpl_name=$read_project_name
local tmpl_url=$read_url
local tmpl_interval=$read_interval
local tmpl_submodules=$read_submodules
# "N" means unlimited: emit the depth line commented out ("# " prefix)
if [ $read_submodule_limit = "N" ]
then
local tmpl_submodules_depth_enabled='# '
local tmpl_submodules_depth=50000
else
local tmpl_submodules_depth_enabled=''
local tmpl_submodules_depth=$read_submodule_limit
fi
cat > $loc <<-EOF
# cloner.cfg
# main config
# created at $(date +"%Y-%m-%d %X")
# main url - url of main repo - just to clone
cloner_repo_url=$tmpl_url
# project name (names of volumes are derrived from this
cloner_project_name=$tmpl_name
# cloner interval (in minutes, default=0 - run always)
cloner_interval=$tmpl_interval
# do you need submodules support? (1/0)
cloner_submodules=$tmpl_submodules
# max depth of submodule scan (default = unlimited, uncomment to use)
${tmpl_submodules_depth_enabled}cloner_submodule_depth=$tmpl_submodules_depth
EOF
}
function createDetectorConfig(){
    # Write the (empty) detector config when CI support was requested.
    # $1 - destination file. The detector is enabled by the mere
    # existence of this file, so nothing is created when
    # read_detector != 1 (only the parent directory is ensured).
    local target=$1
    mkdir -p "$(dirname "$target")"
    if [ "$read_detector" -eq 1 ]
    then
        printf '%s\n' \
            "# this file is config for detector" \
            "# now, it is empty - to disable detector, just delete it!" > "$target"
    fi
}
function generateSSHKey(){
# generates ssh key with $1 path and $2 description
local keyfile=$1/identity
local description=$2
echo "Creating SSH deployment key.."
# ed25519 with empty passphrase - the cloner must run unattended
ssh-keygen -f $keyfile -t ed25519 -C "$description" -N ""
echo
echo "Public key is:"
echo "-----------------------------------------------------"
cat $keyfile.pub
echo "-----------------------------------------------------"
echo -n "Please make sure that key is set up at your git hosting and press enter.."
# block until the user has installed the key on the hosting side
read
}
function reuseSSHKey(){
    # Let the user paste an existing private key via vim, then sanity-check it.
    # $1 - destination identity file
    local keyfile=$1
    local scratch=$(mktemp)
    echo "# Please paste private ssh key here and save this file" > $scratch
    vim $scratch
    # strip the instruction comment (and any other # comment) before saving
    sed -e 's/#.*$//' $scratch > $keyfile
    rm $scratch
    echo "Checking key..."
    # fix: 0600 instead of 0700 - a private key must be unreadable to
    # group/other, but the executable bit is wrong on a key file
    chmod 0600 $keyfile
    ssh-keygen -y -f $keyfile -P "" || true # will fail in the end, so script will continue and clean up the mess
}
# start reading vars
echo -n "Enter project name: "
read read_project_name
checkProjectName "$read_project_name"
# repository URL
echo -n "Enter git repository URL: "
read read_url
[ -n "$read_url" ] || die "Empty url is not allowed!"
# check interval
echo -n "Enter check interval in minutes [5]: "
read read_interval
[ -n "$read_interval" ] || read_interval=5
# fix: abort on a non-numeric interval instead of only printing a warning
# and writing the bad value into cloner.cfg (consistent with the URL check)
[[ "$read_interval" =~ ^[0-9]+$ ]] || die "Entered interval is not number. Try again.."
# submodule use
echo -n "Mirror including submodules? [Y/n]"
read read_submodules
[ -n "$read_submodules" ] || read_submodules=Y
[[ "$read_submodules" =~ ^[Yy]$ ]] && read_submodules=1 || read_submodules=0
# submodule limit - only asked when submodules are mirrored; N = unlimited
if [ $read_submodules -eq 1 ]
then
    echo -n "Limit for submodule discovery [<number>/N]: "
    read read_submodule_limit
    [ -n "$read_submodule_limit" ] || read_submodule_limit=N
    if ! [[ "$read_submodule_limit" =~ ^[Nn]$ ]]
    then
        [[ "$read_submodule_limit" =~ ^[0-9]+$ ]] || die "Submodule limit must be n,N or number!"
    fi
else
    read_submodule_limit=N
fi
# determine CI?
echo -n "Do you want to enable CI support? (detector) [Y/n]"
read read_detector
[ -n "$read_detector" ] || read_detector=Y
if ! [[ "$read_detector" =~ ^[Yy]$ ]]
then
    read_detector=0
else
    read_detector=1
fi
root=/data/$dir_prefix-$read_project_name
# start generating config
mkdir -p $root/config
createConfigFile $root/config/cloner.cfg
# use ssh config?
echo -n "Would you like to use SSH auth? ([C]reate new key/[U]se existing key/[N]o) [C/u/n]: "
read read_ssh
[ -n "$read_ssh" ] || read_ssh=C
[[ "$read_ssh" =~ ^[CcUuNn]$ ]] || die "Invalid SSH key option, script is exiting now.."
# ssh resolutions?
# create dir if needed
[[ "$read_ssh" =~ ^[nN]$ ]] || mkdir -p $root/config/auth/ssh
# generate new key
if [[ "$read_ssh" =~ ^[Cc]$ ]]
then
    # create key
    generateSSHKey $root/config/auth/ssh "cloner-deploy-key-$read_project_name"
fi
# use existing key
if [[ "$read_ssh" =~ ^[Uu]$ ]]
then
    # load key
    reuseSSHKey $root/config/auth/ssh/identity
fi
echo "First run - initialization of repos..."
if ! env BASE=$root run-checker
then
    # initial sync failed - offer to roll the half-created project back
    echo -n "First run failed - remove directory? [Y/n]"
    read read_cleanup
    [ -n "$read_cleanup" ] || read_cleanup=Y
    if [[ "$read_cleanup" =~ ^[Yy]$ ]]
    then
        rm -Rf $root
    fi
else
    createDetectorConfig $root/config/detector.cfg
    echo "Setup has finished!"
    # the .enabled marker makes cron-command pick the project up
    touch $root/.enabled
fi

13
old/logcat.service Normal file
View File

@@ -0,0 +1,13 @@
[Unit]
Description=Logcat service startup
# fix: "Documentation=" must contain URIs (man:, http:, file:, ...);
# free text there is invalid and ignored by systemd, so the note lives
# here instead: fixes /data ownership and starts frontail log tailing.
After=network.target

[Service]
# startup-logcat runs frontail with --daemonize, hence Type=forking
Type=forking
User=root
ExecStart=/usr/local/bin/startup-logcat
Restart=on-failure

[Install]
WantedBy=multi-user.target

12
old/logdump.service Normal file
View File

@@ -0,0 +1,12 @@
[Unit]
Description=Dumps journalctl to syslog file
After=network.target
[Service]
# simple: startup-logdump exec's "journalctl -f" and stays in the foreground
Type=simple
User=root
ExecStart=/usr/local/bin/startup-logdump
Restart=on-failure
[Install]
WantedBy=multi-user.target

72
old/src/cclone Executable file
View File

@@ -0,0 +1,72 @@
#!/bin/bash
#
# Clone repository from mirror
#
# Usage:
# cclone <main repo url> [ -p <path> ] [ -c <checkout_ref> ]
# strict mode
set -euo pipefail
IFS=$'\n\t'
# include config
source $(dirname $(realpath $0))/config
source $(dirname $(realpath $0))/gen-mirror-path

# parse arguments
function usage(){
    # fix: mention the mandatory repo url in the usage text
    echo "Usage: $0 <main repo url> [-p <path>] [-c <checkout_ref>]" 1>&2
    exit 1
}

# fix: without this guard (present in sclone) a no-argument call aborted
# on unbound $1 under `set -u` instead of printing usage
[ $# -eq 0 ] && usage
repo=$1
if [[ ! "$repo" =~ ^-.* ]]
then
    # check if repo was mirrored
    if [ ! -d $(getRepoPath $repo) ]
    then
        # fix: typo "yes" -> "yet"
        echo "Specified repo wasn't mirrored yet, please do it so!" 1>&2
        exit 1
    fi
    # kick args +1
    shift
else
    usage
fi
# other opts
while getopts "c:p:" o; do
    case "${o}" in
        c)
            param_c=${OPTARG}
            ;;
        p)
            param_p=${OPTARG}
            ;;
        \?)
            usage
            ;;
    esac
done
# Run clone
cloneurl=$(getRepoPath $repo)
clonepath=${param_p:-}
echo "Cloning $repo"
git clone file://$cloneurl $clonepath
checkout=${param_c:-}
if [ -n "$checkout" ]
then
    # target dir: explicit -p path, or the uniq name git derived from the url
    chdir=${param_p:-$(getRepoUniq $repo)}
    echo "Checking out $checkout"
    oldpwd=$(pwd)
    cd $chdir
    # -b just to make git less verbose
    git checkout $checkout -b _tmp_$checkout
    cd $oldpwd
fi

14
old/src/config Normal file
View File

@@ -0,0 +1,14 @@
#!/bin/bash
# Shared settings for the mirror scripts (sourced, not executed).
# CCLONE_PATH / CCLONE_CACHE in the environment override the defaults.
defaultMirrorPath="$HOME/.git-mirror"
defaultCachePath="$HOME/.git-mirror-cache"
# effective paths - environment wins over the defaults
cfgMirrorPath=${CCLONE_PATH:-$defaultMirrorPath}
cfgCachePath=${CCLONE_CACHE:-$defaultCachePath}
# make the sibling scripts callable by bare name
bindir=$(dirname "$(realpath "$0")")
PATH=$bindir:$PATH

42
old/src/gen-mirror-path Executable file
View File

@@ -0,0 +1,42 @@
#!/bin/bash
# Map a repository url to its local mirror/cache paths.
# May be sourced (for the functions) or executed directly, in which case
# it prints the mirror path for the url in $1 (see the guard at the end).
source $(dirname $(realpath $0))/config
function getRepoUniq(){
    # Derive a unique, filesystem-safe mirror name from a repo url:
    # <namespace>_<repo>_<checksum-of-url>
    local url=$1
    local base rest namespace csum
    base=$(basename "$url" .git)
    rest=$(dirname "$url")
    # a path ending in /.git points inside a working repo - step one level up
    if [ "$base" == ".git" ]
    then
        base=$(basename "$rest")
        rest=$(dirname "$rest")
    fi
    # extract username - or path
    namespace=$(basename "$rest")
    # solve ssh form domain:namespace/repo.git
    if [[ "$namespace" == *:* ]]
    then
        namespace=$(echo "$namespace" | cut -d':' -f2)
    fi
    # the checksum keeps two different urls with the same tail apart
    csum=$(echo "$url" | cksum | cut -d' ' -f1)
    echo "${namespace}_${base}_${csum}"
}
function getRepoPath(){
    # Full path of the bare mirror for url $1.
    local uniq
    uniq=$(getRepoUniq "$1")
    echo "$cfgMirrorPath/$uniq.git"
}
function getRepoCache(){
    # Full path of the scan cache directory for url $1.
    local uniq
    uniq=$(getRepoUniq "$1")
    echo "$cfgCachePath/$uniq"
}
# if the script is executed directly (not sourced), print the mirror path
# (same condition as the original double negative `! a != b`)
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]
then
    getRepoPath "$1"
fi

50
old/src/mirror-main-repo Executable file
View File

@@ -0,0 +1,50 @@
#!/bin/bash
#
# Just mirror (clone or fetch) specified git repository
# - no other mess (eg submodules, just clean mirror)
#
# Usage:
# mirror-main-repo <url>
# Unofficial strict mode
set -euo pipefail
IFS=$'\n\t'
source $(dirname $(realpath $0))/gen-mirror-path
# Clone url $1 as a bare mirror on first sight, `git fetch --prune` afterwards.
function updateOrCreate(){
url=$1
repodir=$(getRepoPath $url)
if [ ! -d $repodir ]
then
echo "Clone of $url"
git clone --bare --mirror $url $repodir
# create FETCH_HEAD needed by other scripts
cd $repodir
git fetch --prune
else
cd $repodir
echo "Update of $url"
git fetch --prune
fi
}
# Print the newest commit hash across all refs of $1, or '-' if not mirrored.
# NOTE(review): never called in this script - presumably kept for callers
# that source it; confirm before removing
function getLastCommit(){
url=$1
repodir=$(getRepoPath $url)
if [ -d $repodir ]
then
cd $repodir
git --no-pager log --full-history --all -1 --pretty=format:"%H%n"
else
echo '-'
fi
}
oldPwd=$(pwd)
updateOrCreate $1
cd $oldPwd

113
old/src/mirror-recursive Executable file
View File

@@ -0,0 +1,113 @@
#!/bin/bash
#
# Mirror git repository with submodules - recursively
#
# Usage:
# mirror-recursive <main repo url> [<depth=1000>]
# strict mode
set -euo pipefail
IFS=$'\n\t'
# Scratch - temp
tmpdir=$(mktemp -d -t mirror-recursive-XXXXXXX)
function finish {
rm -rf "$tmpdir"
}
trap finish EXIT
source $(dirname $(realpath $0))/config
source $(dirname $(realpath $0))/gen-mirror-path
# Print "..N%" progress without newline; terminates the line at 100%.
function progress(){
local progress=${1:-100}
echo -n "..$progress%"
if [ $progress -eq 100 ]
then
echo
fi
}
# Scan every not-yet-seen commit of $1 for submodule urls, mirror each
# submodule and recurse into it. A per-repo cache of already scanned
# commits keeps repeated runs cheap; the marker file in $tmpdir stops
# infinite recursion when repos reference each other.
function submoduleDiscovery(){
# main parameters
local repo=$1
local gitdir=$(getRepoPath $repo)
# depth (empty or prefixed from main script)
local depth=${2:-}
# temporary path
local tmpname=$(getRepoUniq $repo)
tmpname=$tmpdir/$tmpname
local tmpCommitList=$tmpname.commits
local tmpSubmoduleList=$tmpname.submodules
# cache paths
local cachePath=$(getRepoCache $repo)
local cacheCommits=$cachePath/commits-checked
local cacheSubmodules=$cachePath/submodules-checked
# check, if cache exists
[ -d $cachePath ] || mkdir -p $cachePath
[ -f $cacheCommits ] || touch $cacheCommits
[ -f $cacheSubmodules ] || touch $cacheSubmodules
# avoid recursion - if commit list exists
# there was activity with this run recently
if [ ! -f $tmpCommitList ]
then
# cache submodules reuse
cat $cacheSubmodules > $tmpSubmoduleList
echo -n "Discovering submodules of $repo.. "
git --git-dir $gitdir log --all $depth --format="%H" | sort > $tmpCommitList
# check against cache
echo -n "cache check.."
# comm -13: keep only commits not already in the (sorted) cache
comm -13 $cacheCommits $tmpCommitList > $tmpname
mv $tmpname $tmpCommitList
local commits=$(wc -l $tmpCommitList | cut -d' ' -f1)
echo -n "$commits commits"
# this can take long time...
local processed=0
local nextStamp=$(($(date +"%s") + 3))
while read -r line || [[ -n "$line" ]]
do
# field 3 of submodule-describe output is the submodule url
submodule-describe $gitdir $line | cut -f 3 >> $tmpSubmoduleList
# progress indication
processed=$(($processed + 1))
# print progress at most every 3 seconds
if [ $(date +"%s") -gt $nextStamp ]
then
progress $((100*$processed/$commits))
nextStamp=$(($nextStamp + 3))
fi
done < $tmpCommitList
# finish the bar
progress
# archive to cache
cat $tmpCommitList $cacheCommits > $tmpname
sort $tmpname > $cacheCommits
sort $tmpSubmoduleList | uniq > $tmpname
cat $tmpname > $cacheSubmodules
# Recursion++
while read -r submodule || [[ -n "$submodule" ]]
do
mirror-main-repo $submodule
submoduleDiscovery $submodule $depth
done < $cacheSubmodules
fi
}
# main repo
mainrepo=$1
depth=${2:-}
# git log takes the history limit in "-<n>" form
[ -n "$depth" ] && depth="-$depth"
# Make first mirror
mirror-main-repo $mainrepo
submoduleDiscovery $mainrepo $depth

73
old/src/sclone Executable file
View File

@@ -0,0 +1,73 @@
#!/bin/bash
#
# Clone repository from mirror - recursively with submodules
#
# Usage:
# sclone <main repo url> [ -p <path> ] [ -c <checkout_ref>
# strict mode
set -euo pipefail
IFS=$'\n\t'
# Scratch - temp
tmpdir=$(mktemp -d -t mirror-recursive-XXXXXXX)
function finish {
rm -rf "$tmpdir"
}
trap finish EXIT
# include config
source $(dirname $(realpath $0))/config
source $(dirname $(realpath $0))/gen-mirror-path
# parse arguments
function usage(){
echo "Usage: $0 [-p <path>] [-c <checkout_ref>]" 1>&2
exit 1
}
[ $# -eq 0 ] && usage
repo=$1
# clone the repo
# cclone validates the url, clones from the local mirror and checks out;
# all original arguments are forwarded to it
cclone $@ || true
# skip url
shift
# parse opts
while getopts "c:p:" o; do
case "${o}" in
c)
param_c=${OPTARG}
;;
p)
param_p=${OPTARG}
;;
\?)
usage
;;
esac
done
# change dir and examine the commit + submodules
oldpwd=$(pwd)
submodules=$tmpdir/submodules
cd ${param_p:-$(getRepoUniq $repo)}
# one "<hash> <path> <url>" line per submodule of the checked-out commit
submodule-describe . > $submodules
while read -r line || [[ -n "$line" ]]
do
# read -r retypes \t to ' ' (space)
commit=$(echo $line | cut -f1 -d' ')
directory=$(echo $line | cut -f2 -d' ')
url=$(echo $line | cut -f3 -d' ')
# recursion ++
sclone $url -p $directory -c $commit
done < $submodules
cd $oldpwd

66
old/src/submodule-describe Executable file
View File

@@ -0,0 +1,66 @@
#!/bin/bash
#
# Describe submodules in repository, optionally per commit
#
# Usage:
# submodule-describe <path-to-repo> [<commit>]
#
# Output:
# <hash> <path> <url>
# <hash> <path> <url>
#
# everything separated with tabs
# Safe mode
set -euo pipefail
IFS=$'\n\t'
# get config file for specific commit
# NOTE(review): ignores its arguments and reads the global $commit;
# it is invoked below with ($repodir, $commit), which are silently unused
function getConfigFile() {
git --no-pager show $commit:.gitmodules
}
# parse submodule file from file
# $1 - .gitmodules file; prints each submodule section name once
function parseSectionNames(){
git config -f $1 --list --name-only | grep '^submodule.' | cut -d. -f2 | sort | uniq
}
# generate line for single submodule
# $1 - .gitmodules file, $2 - section name; reads the global $commit
function generateDescription(){
cfgFile=$1
section=$2
path=$(git config -f $cfgFile --get submodule.$section.path)
url=$(git config -f $cfgFile --get submodule.$section.url)
# field 3 of `git ls-tree -l` is the submodule's recorded commit hash
hash=$(git ls-tree -l $commit -- $path | cut -d' ' -f3)
printf "%s\t%s\t%s\n" $hash $path $url
}
# Grab variables
repodir=$1
commit=${2:-HEAD}
# Go to repo directory
oldPwd=$(pwd)
cd $repodir
# Are there any submodules registered?
test 0 -eq `git ls-tree $commit -- .gitmodules | wc -l` && exit 0
tmpfile=$(mktemp)
getConfigFile $repodir $commit > $tmpfile
for section in $(parseSectionNames $tmpfile)
do
generateDescription $tmpfile $section
done
# Cleanup
rm $tmpfile
# Go back home
cd $oldPwd

18
old/startup-logcat Executable file
View File

@@ -0,0 +1,18 @@
#!/bin/bash
# prepares permissions for files & starts frontail to tail log files
# make parallel citation shut up
mkdir -p ~executor/.parallel
touch ~executor/.parallel/will-cite
# pid file ownership (cron-command writes its pid here as the executor user)
touch /var/run/cloner.pid
chown executor:executor /var/run/cloner.pid
# repair ownership
find /data \! -user executor -exec chown executor:executor {} \;
# serve both logs over HTTP at /logs; --daemonize forks into background
frontail /var/log/syslog /home/executor/cron.log --daemonize --url-path /logs

7
old/startup-logdump Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
# dump journalctl to /var/log/syslog
# logrotate is handled itself
# exec replaces the shell so systemd supervises journalctl directly
exec journalctl -f > /var/log/syslog

12
old/syslog.logrotate Normal file
View File

@@ -0,0 +1,12 @@
# rotation for the syslog file produced by startup-logdump
/var/log/syslog {
daily
missingok
rotate 10
compress
delaycompress
notifempty
# copytruncate keeps journalctl's open file descriptor valid, so no
# replacement file needs to be created (hence nocreate)
nocreate
copytruncate
}