This commit is contained in:
2025-10-15 09:00:46 +02:00
parent 2d5de900c4
commit f2aa66a5e1
115 changed files with 12107 additions and 0 deletions

View File

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Bash completion for applyTemplate.sh: offers the remaining options
# (-help, -timestamp) while the cursor is still in the option zone,
# then falls back to filename completion for the template/dst arguments.
# Fixes: shebang was missing '!'; a lowercase 'options' array was
# declared but an unrelated uppercase OPTIONS was used; 'let' replaced
# by arithmetic expansion.
_applyTemplate_completions () {
    local -a options=("-help" "-timestamp")
    COMPREPLY=()
    local cur options_count=0 item
    cur=${COMP_WORDS[COMP_CWORD]}
    # Count the options already typed and drop them from the candidates.
    for item in "${COMP_WORDS[@]:1}"
    do
        if [[ " ${options[*]} " =~ " ${item} " ]] ; then
            options_count=$((options_count+1))
            options=(${options[@]/${item}})
        else
            break
        fi
    done
    # Cursor inside the option zone (or just after it) and word starts
    # with '-': propose the remaining options.
    ((COMP_CWORD <= options_count+1)) && [[ "${cur}" =~ ^- ]] && COMPREPLY=( $(compgen -W "${options[*]}" -- "${cur}" ) ) && return 0
    # Past the options (or no leading '-'): complete filenames.
    ((COMP_CWORD <= options_count+2)) && COMPREPLY=($(compgen -f -- "${cur}")) && return 0
    return 0
}
complete -F _applyTemplate_completions applyTemplate.sh

382
bin2/.commonFunctions.sh Executable file
View File

@@ -0,0 +1,382 @@
# commun fonctions for KAZ
#TODO; toutes les fonctions ci-dessous devraient être commentées
#KI : françois
#KOI : tout un tas de trucs utiles pour la gestion de l'infra kaz (à mettre dans chaque script)
#KAN :
# maj le 27/01/2024 by FAB: recherche de tous les srv kaz dispo (via le DNS)
# maj le 15/04/2024 by FAB: correction getPahekoOrgaList
# https://wiki.bash-hackers.org/scripting/terminalcodes
# Terminal style/color codes (see the terminalcodes link above).
# NOTE(review): every value below is an empty string — the ANSI escape
# sequences (e.g. $'\e[1m' for BOLD) appear to have been lost in
# transcription, so colored output is currently a no-op. Confirm
# against the original file.
BOLD=''
RED=''
GREEN=''
YELLOW=''
BLUE=''
MAGENTA=''
CYAN=''
NC='' # No Color
# NL holds a literal newline, used as a separator in list-membership tests.
NL='
'
########################################
setKazVars () {
# Derive and export every KAZ_* path variable from ${KAZ_ROOT}.
# KAZ_ROOT must be set
if [ -z "${KAZ_ROOT}" ]; then
printKazError "\n\n *** KAZ_ROOT not defined! ***\n"
# NOTE(review): a bare 'exit' ends the *calling* shell when this file is
# sourced, and propagates the previous command's status.
exit
fi
export KAZ_KEY_DIR="${KAZ_ROOT}/secret"
export KAZ_BIN_DIR="${KAZ_ROOT}/bin"
export KAZ_CONF_DIR="${KAZ_ROOT}/config"
export KAZ_CONF_PROXY_DIR="${KAZ_CONF_DIR}/proxy"
export KAZ_COMP_DIR="${KAZ_ROOT}/dockers"
export KAZ_STATE_DIR="${KAZ_ROOT}/state"
export KAZ_GIT_DIR="${KAZ_ROOT}/git"
export KAZ_DNLD_DIR="${KAZ_ROOT}/download"
export KAZ_DNLD_PAHEKO_DIR="${KAZ_DNLD_DIR}/paheko"
export APPLY_TMPL=${KAZ_BIN_DIR}/applyTemplate.sh
export DOCKERS_ENV="${KAZ_CONF_DIR}/dockers.env"
# Docker storage locations (local disk and NAS).
export DOCK_LIB="/var/lib/docker"
export DOCK_VOL="${DOCK_LIB}/volumes"
export DOCK_VOL_PAHEKO_ORGA="${DOCK_LIB}/volumes/paheko_assoUsers/_data/"
export NAS_VOL="/mnt/disk-nas1/docker/volumes/"
}
########################################
# Internal helper: echo $2 wrapped in color $1 + BOLD, honoring backslash escapes.
_kazColorEcho () {
    echo -e "${1}${BOLD}${2}${NC}"
}
# Print an informational message ($1) in bold cyan.
printKazMsg () {
    _kazColorEcho "${CYAN}" "$1"
}
# Print an error message ($1) in bold red.
printKazError () {
    _kazColorEcho "${RED}" "$1"
}
########################################
checkContinue () {
# Interactively ask "Do you want to continue?". Empty answer or
# y/Y/o/O (oui) continues; n/N terminates the calling shell via 'exit';
# anything else re-asks.
local rep
while : ; do
read -p "Do you want to continue? [yes]" rep
case "${rep}" in
""|[yYoO]* )
break
;;
[Nn]* )
exit
;;
* )
echo "Please answer yes or no."
;;
esac
done
}
checkDockerRunning () {
# $1 docker name
# $2 service name
# Return 0 when a container whose name matches $1 appears in `docker ps`;
# otherwise print an error mentioning $2 and return 1.
if ! [[ "$(docker ps -f "name=$1" | grep -w "$1")" ]]; then
printKazError "$2 not running... abort"
return 1
fi
return 0
}
########################################
testValidIp () {
    # $1: candidate IPv4 address.
    # Return 0 when it is a valid dotted quad (each octet 0-255), else 1.
    local addr=$1
    local status=1
    if [[ ${addr} =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
        # Split on dots, then range-check every octet.
        OIFS=$IFS
        IFS='.'
        local -a quad=(${addr})
        IFS=$OIFS
        if [[ ${quad[0]} -le 255 && ${quad[1]} -le 255 && ${quad[2]} -le 255 && ${quad[3]} -le 255 ]]; then
            status=0
        fi
    fi
    return ${status}
}
########################################
getValInFile () {
    # $1 filename
    # $2 varname
    # Print the value of the first "varname = value" assignment in the
    # file (whitespace around '=' tolerated); empty output if absent.
    # Fix: "$1" is now quoted so filenames with spaces/globs work.
    grep "^\s*$2\s*=" "$1" 2>/dev/null | head -1 | sed "s%^\s*$2\s*=\(.*\)$%\1%"
}
getList () {
    # $1 filename
    # Print the file's entries sorted, with trailing comments, leading
    # whitespace and blank lines removed.
    (cat "$1" | sort; echo) | sed -e "s/\(.*\)[ \t]*#.*$/\1/" -e "s/^[ \t]*\(.*\)$/\1/" -e "/^$/d"
}
getPahekoPluginList () {
# List downloaded Paheko plugin archives: everything in the download
# directory except the paheko core archives themselves.
ls "${KAZ_DNLD_PAHEKO_DIR}" | grep -v "paheko-"
}
getPahekoOrgaList () {
# List organisation directories inside the Paheko users volume, sorted.
# ls "${DOCK_VOL_PAHEKO_ORGA}"
find ${DOCK_VOL_PAHEKO_ORGA} -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort
}
getAvailableComposes () {
# List compose directories that are not per-organisation ('*-orga') ones.
ls "${KAZ_COMP_DIR}" | grep -v -- "^.*-orga$"
}
getAvailableOrgas () {
#KI : Fab
#KOI : list every orga for a given server, or the current server when none is given
#KAN : 27/01/2024
# input: optional target server name
SITE_DST="$1"
if [ -n "${SITE_DST}" ];then
# Remote server: list its '*-orga' compose dirs over ssh
# (${domain} comes from the sourced dockers.env).
ssh -p 2201 root@${SITE_DST}.${domain} "ls \"${KAZ_COMP_DIR}\" | grep -- \"^.*-orga$\""
else
ls "${KAZ_COMP_DIR}" | grep -- "^.*-orga$"
fi
}
getAvailableServices () {
    # Print the fixed set of services an organisation may enable, one per line.
    printf '%s\n' paheko cloud collabora agora wiki wp
}
########################################
filterInList () {
    # $*: reference list; stdin: candidate names, one per line.
    # Emit (sorted, unique) the candidates present in the reference list.
    local candidate
    while read candidate ; do
        case " $* " in
            *" ${candidate} "*) echo ${candidate} ;;
        esac
    done | sort -u
}
filterNotInList () {
    # $*: reference list; stdin: candidate names, one per line.
    # Emit (sorted, unique) the candidates absent from the reference list.
    local candidate
    while read candidate ; do
        case " $* " in
            *" ${candidate} "*) ;;
            *) echo ${candidate} ;;
        esac
    done | sort -u
}
filterAvailableComposes () {
# $* candidates
# Validate each candidate against the known composes/orgas. With no
# argument, print them all. Unknown names are replaced by every compose
# whose name contains them (with a notice on stderr) or reported as
# unknown. Output is sorted and unique.
local AVAILABLE_COMPOSES=$(getAvailableComposes;getAvailableOrgas)
if [ $# -eq 0 ] ; then
echo ${AVAILABLE_COMPOSES}
fi
local compose
for compose in $*
do
# tolerate a trailing '/' (e.g. from directory completion)
compose=${compose%/}
if [[ ! "${NL}${AVAILABLE_COMPOSES}${NL}" =~ "${NL}${compose}${NL}" ]]; then
local subst=""
for item in ${AVAILABLE_COMPOSES}; do
# substring match: emit every compose containing the candidate
[[ "${item}" =~ "${compose}" ]] && echo ${item} && subst="${subst} ${item}"
done
if [ -z "${subst}" ] ; then
echo "${RED}${BOLD}Unknown compose: ${compose} not in "${AVAILABLE_COMPOSES}"${NC}" >&2
#exit 1
else
echo "${BLUE}${BOLD}substitute compose: ${compose} => "${subst}"${NC}" >&2
fi
else
echo "${compose}"
fi
done | sort -u
}
########################################
serviceOnInOrga () {
    # $1 orga name ; $2 service name ; $3 default value.
    # Print "on"/"off" depending on whether $2 appears in the orga's
    # docker-compose.yml, or $3 when that file does not exist.
    local composeFile="${KAZ_COMP_DIR}/$1-orga/docker-compose.yml"
    if [[ -f "${composeFile}" ]]
    then
        if grep -q "$2" "${composeFile}" 2>/dev/null
        then
            echo on
        else
            echo off
        fi
    else
        echo "$3"
    fi
}
########################################
waitUrl () {
# $1 URL to wait for
# $2 timeout in seconds (optional)
# Poll $1 with curl every 5s until the first response header line carries
# an HTTP 20x/30x status. Return 0 once reachable, 1 on timeout.
starttime=$(date +%s)
if [[ $(curl -k --connect-timeout 2 -s -D - "$1" -o /dev/null 2>/dev/null | head -n1) != *[23]0[0-9]* ]]; then
printKazMsg "service not available ($1). Please wait..."
# print the probe command so an operator can replay it by hand
echo curl -k --connect-timeout 2 -s -D - "$1" -o /dev/null \| head -n1
while [[ $(curl -k --connect-timeout 2 -s -D - "$1" -o /dev/null 2>/dev/null | head -n1) != *[23]0[0-9]* ]]
do
sleep 5
if [ $# -gt 1 ]; then
actualtime=$(date +%s)
delta=$(($actualtime-$starttime))
# give up once the elapsed time exceeds the requested timeout
[[ $2 -lt $delta ]] && return 1
fi
done
fi
return 0
}
########################################
waitContainerHealthy () {
# $1 ContainerName
# $2 timeout in seconds (optional)
# Wait (polling every 5s) until container $1 is running AND its Docker
# healthcheck reports "healthy". Return 1 on timeout, 0 otherwise.
healthy="false"
starttime=$(date +%s)
running="false"
[[ $(docker ps -f name="$1" | grep -w "$1") ]] && running="true"
[[ $running == "true" && $(docker inspect -f {{.State.Health.Status}} "$1") == "healthy" ]] && healthy="true"
if [[ ! $running == "true" || ! $healthy == "true" ]]; then
printKazMsg "Docker not healthy ($1). Please wait..."
while [[ ! $running == "true" || ! $healthy == "true" ]]
do
sleep 5
if [ $# -gt 1 ]; then
actualtime=$(date +%s)
delta=$(($actualtime-$starttime))
[[ $2 -lt $delta ]] && printKazMsg "Docker not healthy ($1)... abort..." && return 1
fi
# re-probe running state first, then the health status
[[ ! $running == "true" ]] && [[ $(docker ps -f name="$1" | grep -w "$1") ]] && running="true"
[[ $running == "true" && $(docker inspect -f {{.State.Health.Status}} "$1") == "healthy" ]] && healthy="true"
done
fi
return 0
}
########################################
waitContainerRunning () {
# $1 ContainerName
# $2 timeout in seconds (optional)
# Wait (polling every 5s) until container $1 appears in `docker ps`.
# Return 1 on timeout, 0 otherwise.
starttime=$(date +%s)
running="false"
[[ $(docker ps -f name="$1" | grep -w "$1") ]] && running="true"
if [[ ! $running == "true" ]]; then
printKazMsg "Docker not running ($1). Please wait..."
while [[ ! $running == "true" ]]
do
sleep 5
if [ $# -gt 1 ]; then
actualtime=$(date +%s)
delta=$(($actualtime-$starttime))
[[ $2 -lt $delta ]] && printKazMsg "Docker did not start ($1)... abort..." && return 1
fi
[[ ! $running == "true" ]] && [[ $(docker ps -f name="$1" | grep -w "$1") ]] && running="true"
done
fi
return 0
}
########################################
downloadFile () {
    # $1 URL to download
    # $2 new filename (optional; defaults to the URL's basename)
    # Download $1 into $2. When the target already exists, download to a
    # temporary file first and replace the target only when the content
    # changed, so an unchanged file keeps its timestamp.
    # Fix: curl failures are now detected, so a partial download can no
    # longer be left behind or clobber a previously good file.
    if [ $# -lt 1 ] || [ $# -gt 2 ]; then
        printKazError "downloadFile: bad arg number"
        return
    fi
    URL=$1
    if [ -z "$2" ]; then
        FILENAME="$(basename $1)"
    else
        FILENAME="$2"
    fi
    if [ ! -f "${FILENAME}" ]; then
        printKazMsg " - load ${URL}"
        if ! curl -L -o "${FILENAME}" "${URL}"; then
            printKazError "downloadFile: download of ${URL} failed"
            rm -f "${FILENAME}"
            return 1
        fi
    else
        TMP="${FILENAME}.tmp"
        rm -f "${TMP}"
        if ! curl -L -o "${TMP}" "${URL}"; then
            printKazError "downloadFile: download of ${URL} failed"
            rm -f "${TMP}"
            return 1
        fi
        # keep the old file (and its timestamp) when nothing changed
        if ! cmp -s "${TMP}" "${FILENAME}" 2>/dev/null; then
            mv "${TMP}" "${FILENAME}"
        else
            rm -f "${TMP}"
        fi
    fi
}
unzipInDir () {
# $1 zipfile
# $2 destDir
# Extract $1 into $2/<zipname>. The archive must contain exactly one
# top-level entry; extraction goes through a temp dir so a partial
# unzip never pollutes the destination.
if [ $# -ne 2 ]; then
printKazError "unzipInDir: bad arg number"
return
fi
if ! [[ $1 == *.zip ]]; then
printKazError "unzipInDir: $1 is not a zip file"
return
fi
if ! [[ -d $2 ]]; then
printKazError "$2 is not destination dir"
return
fi
destName="$2/$(basename "${1%.zip}")"
if [[ -d "${destName}" ]]; then
printKazError "${destName} already exist"
return
fi
tmpDir=$2/tmp-$$
# NOTE(review): this replaces any EXIT trap set by the caller, and an
# aborted call only removes ${tmpDir} when the shell finally exits.
trap 'rm -rf "${tmpDir}"' EXIT
unzip "$1" -d "${tmpDir}"
srcDir=$(ls -1 "${tmpDir}")
# NOTE(review): an empty archive also yields wc -l == 1 (the here-string
# appends a newline), so the 0 branch below may be unreachable — verify.
case $(wc -l <<< $srcDir) in
0)
printKazError "empty zip file : $1"
rmdir "${tmpDir}"
return
;;
1)
# single root entry: move it into place under its final name
mv "${tmpDir}/${srcDir}" "${destName}"
rmdir "${tmpDir}"
;;
*)
printKazError "zip file $1 is not a tree (${srcDir})"
return
;;
esac
}
########################################
get_Serveurs_Kaz () {
    #KI : Fab
    #KOI : return the list of every Kaz server, read from the DNS TXT
    #      record srv.kaz.bzh ("srv1;srv2;..." becomes "srv1 srv2 ...")
    #KAN : 27/01/2024
    liste=$(dig -t TXT srv.kaz.bzh +short)
    # clean up: turn ';' separators into spaces and drop the quotes
    liste=$(echo "$liste" | sed -e 's/\;/ /g' -e 's/\"//g')
    # outputs: srv1 srv2 srv3 ...
    echo ${liste}
}
########################################

66
bin2/.container-completion.bash Executable file
View File

@@ -0,0 +1,66 @@
#/usr/bin/env bash
# NOTE(review): the line above is missing the '!' of a shebang; harmless
# when the file is only sourced for completion, but worth fixing.
# Bash completion for container.sh: completes options (-h -n), then the
# command (status/start/stop/save), then compose names obtained from
# kazList.sh, filtered to the set relevant for the chosen command.
_container_completions () {
KAZ_ROOT=$(cd "$(dirname ${COMP_WORDS[0]})"/..; pwd)
COMPREPLY=()
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# cword: cursor index; skip: number of '-' options before the cursor.
local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]} card=${#COMP_WORDS[@]} i w skip=0
for ((i=1 ; i<cword; i++)) ; do
w="${COMP_WORDS[i]}"
[[ "${w}" == -* ]] && ((skip++))
done
# cmd: first non-option word; names: remaining non-option words.
local arg_pos w i cmd= names=
((arg_pos = cword - skip))
for ((i=1 ; i<card; i++)) ; do
w="${COMP_WORDS[i]}"
if [ -z "${cmd}" ] ; then
[[ "${w}" == -* ]] || cmd="${w}"
continue
fi
names="${names} ${w}"
done
case "$cur" in
-*)
COMPREPLY=( $(compgen -W "-h -n" -- "${cur}" ) ) ;;
*)
local cmd_available="status start stop save"
case "${arg_pos}" in
1)
# $1 of container.sh
COMPREPLY=($(compgen -W "${cmd_available}" -- "${cur}"))
;;
*)
# $2-* of container.sh
[[ " ${cmd_available} " =~ " ${cmd} " ]] || return 0
# select set of names
local names_set="available"
case "${cmd}" in
status)
names_set="available"
;;
start)
names_set="disable"
;;
stop)
names_set="enable"
;;
save)
names_set="validate"
;;
esac
local available_args=$("${KAZ_ROOT}/bin/kazList.sh" "compose" "${names_set}")
# remove previous selected target
local proposal item
for item in ${available_args} ; do
[[ " ${names} " =~ " ${item} " ]] || proposal="${proposal} ${item}"
done
COMPREPLY=($(compgen -W "${proposal}" -- "${cur}"))
;;
esac
esac
return 0
}
complete -F _container_completions container.sh

19
bin2/.dns-completion.bash Executable file
View File

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Bash completion for dns.sh: completes options (-h -n -f) and the
# subcommand (init/list/add/del) when none has been typed yet.
# Fixes: shebang was missing '!'; 'init' was absent from the
# already-typed-command check (so 'dns.sh init <TAB>' kept proposing
# subcommands); the marker variable was set to the literal "arg".
_dns_completions () {
    local cur find arg
    COMPREPLY=()
    cur=${COMP_WORDS[COMP_CWORD]}
    case "$cur" in
        -*)
            COMPREPLY=( $(compgen -W "-h -n -f" -- "${cur}" ) ) ;;
        *)
            # propose a subcommand only when none appears on the line yet
            find=""
            for arg in ${COMP_WORDS[@]} ; do
                [[ " init list add del " =~ " ${arg} " ]] && find="${arg}"
            done
            [ -z "${find}" ] && COMPREPLY=($(compgen -W "init list add del" -- "${cur}")) ;;
    esac
    return 0
}
complete -F _dns_completions dns.sh

View File

@@ -0,0 +1,79 @@
#/usr/bin/env bash
# NOTE(review): the line above is missing the '!' of a shebang; harmless
# when the file is only sourced for completion, but worth fixing.
# Bash completion for foreign-domain.sh: completes the command
# (list/add/del), then for 'add' an orga followed by one of its
# services, or for 'del' an already-published domain name.
_foreign_domain_completions () {
KAZ_ROOT=$(cd "$(dirname ${COMP_WORDS[0]})"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
COMPREPLY=()
# cword: cursor index; skip: number of '-' options before the cursor.
local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]} card=${#COMP_WORDS[@]} i w skip=0
for ((i=1 ; i<cword; i++)) ; do
w="${COMP_WORDS[i]}"
[[ "${w}" == -* ]] && ((skip++))
done
# cmd/opt1/opt2: the first three non-option words of the line.
local arg_pos w i cmd= opt1= opt2=
((arg_pos = cword - skip))
for ((i=1 ; i<card; i++)) ; do
w="${COMP_WORDS[i]}"
if [ -z "${cmd}" ] ; then
[[ "${w}" == -* ]] || cmd="${w}"
continue
fi
if [ -z "${opt1}" ] ; then
[[ "${w}" == -* ]] || opt1="${w}"
continue
fi
if [ -z "${opt2}" ] ; then
[[ "${w}" == -* ]] || opt2="${w}"
break
fi
done
case "$cur" in
-*)
COMPREPLY=( $(compgen -W "-h -n" -- "${cur}" ) ) ;;
*)
local cmd_available="list add del"
if [ "${arg_pos}" == 1 ]; then
# $1 of foreign-domain.sh .sh
COMPREPLY=($(compgen -W "${cmd_available}" -- "${cur}"))
else
. "${KAZ_CONF_DIR}/dockers.env"
case "${cmd}" in
"list")
;;
"add")
case "${arg_pos}" in
2)
# orga names from the container list (strip comments and '-orga')
declare -a availableOrga
availableOrga=($(sed -e "s/\(.*\)[ \t]*#.*$/\1/" -e "s/^[ \t]*\(.*\)-orga$/\1/" -e "/^$/d" "${KAZ_CONF_DIR}/container-orga.list"))
COMPREPLY=($(compgen -W "${availableOrga[*]}" -- "${cur}"))
;;
3)
# services enabled for the selected orga ($opt1)
local availableComposes=$(${KAZ_COMP_DIR}/${opt1}-orga/orga-gen.sh -l)
COMPREPLY=($(compgen -W "${availableComposes[*]}" -- "${cur}"))
;;
esac
;;
"del")
case "${arg_pos}" in
1)
;;
*)
# server_name values harvested from each service's proxy config
local availableComposes=$(${KAZ_BIN_DIR}/kazList.sh service validate|sed -e "s/\bcollabora\b//" -e "s/ / /")
declare -a availableDomaine
availableDomaine=($(for compose in ${availableComposes[@]} ; do
sed -e "s/[ \t]*\([^#]*\)#.*/\1/g" -e "/^$/d" -e "s/.*server_name[ \t]\([^ ;]*\).*/\1/" "${KAZ_CONF_PROXY_DIR}/${compose}_kaz_name"
done))
COMPREPLY=($(compgen -W "${availableDomaine[*]}" -- "${cur}"))
;;
esac
;;
esac
fi
;;
esac
return 0
}
complete -F _foreign_domain_completions foreign-domain.sh

View File

@@ -0,0 +1,22 @@
#!/usr/bin/env bash
# Bash completion for gestContainers.sh: completes its option flags, or
# an enabled service/orga name obtained from kazList.sh.
# Fixes: shebang was missing '!'; "-U --upgrade" was glued together as
# the single token "-U--upgrade" in the proposal list.
_gestContainers_completion () {
    KAZ_ROOT=$(cd "$(dirname ${COMP_WORDS[0]})"/..; pwd)
    COMPREPLY=()
    . "${KAZ_ROOT}/bin/.commonFunctions.sh"
    setKazVars
    local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]}
    case "$cur" in
        -*)
            local proposal="-h --help -n --simu -q --quiet -m --main -M --nas --local -v --version -l --list -cloud -agora -wp -wiki -office -I --install -r -t -exec --optim -occ -u -i -a -U --upgrade -p --post -mmctl"
            COMPREPLY=( $(compgen -W "${proposal}" -- "${cur}" ) )
            ;;
        *)
            # orga name
            local available_orga=$("${KAZ_BIN_DIR}/kazList.sh" "service" "enable" 2>/dev/null)
            COMPREPLY=($(compgen -W "${available_orga}" -- "${cur}"))
            ;;
    esac
    return 0
}
complete -F _gestContainers_completion gestContainers.sh

View File

@@ -0,0 +1,51 @@
#/usr/bin/env bash
# NOTE(review): the line above is missing the '!' of a shebang; harmless
# when the file is only sourced for completion, but worth fixing.
# Bash completion for kazDockerNet.sh: completes options (-h -n), the
# command (list/add), then for 'add' the compose names that do not have
# a docker network yet.
_kazDockerNet_completion () {
KAZ_ROOT=$(cd "$(dirname ${COMP_WORDS[0]})"/..; pwd)
COMPREPLY=()
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]} card=${#COMP_WORDS[@]} i w skip=0
for ((i=1 ; i<cword; i++)) ; do
w="${COMP_WORDS[i]}"
[[ "${w}" == -* ]] && ((skip++))
done
# cmd: first non-option word; names: remaining non-option words.
local arg_pos w i cmd= names=
((arg_pos = cword - skip))
for ((i=1 ; i<card; i++)) ; do
w="${COMP_WORDS[i]}"
if [ -z "${cmd}" ] ; then
[[ "${w}" == -* ]] || cmd="${w}"
continue
fi
names="${names} ${w}"
done
# NOTE(review): cword/cur are re-declared here although the first
# declaration above already set them — redundant but harmless.
local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]}
case "$cur" in
-*)
COMPREPLY=( $(compgen -W "-h -n" -- "${cur}" ) )
;;
*)
local cmd_available="list add"
case "${cword}" in
1)
COMPREPLY=($(compgen -W "${cmd_available}" -- "${cur}"))
;;
*)
[[ "${cmd}" = "add" ]] || return 0
local available_args=$("${KAZ_BIN_DIR}/kazList.sh" "compose" "available" 2>/dev/null)
# networks already created (strip 'name:', 'bridge' and the 'Net' suffix)
local used=$("${KAZ_BIN_DIR}/kazDockerNet.sh" "list" | grep "name:" | sed -e "s%\bname:\s*%%" -e "s%\bbridge\b\s*%%" -e "s%Net\b%%g")
# propose composes neither already typed nor already having a network
local proposal item
for item in ${available_args} ; do
[[ " ${names} " =~ " ${item} " ]] || [[ " ${used} " =~ " ${item} " ]] || proposal="${proposal} ${item}"
done
COMPREPLY=($(compgen -W "${proposal}" -- "${cur}"))
;;
esac
;;
esac
return 0
}
complete -F _kazDockerNet_completion kazDockerNet.sh

83
bin2/.kazList-completion.bash Executable file
View File

@@ -0,0 +1,83 @@
#/usr/bin/env bash
# NOTE(review): the line above is missing the '!' of a shebang; harmless
# when the file is only sourced for completion, but worth fixing.
# Bash completion for kazList.sh: completes the kind (compose/service),
# then the state (available/validate/enable/disable/status), then the
# matching names — enumerated by running kazList.sh itself.
_kazList_completions () {
#KAZ_ROOT=$(cd "$(dirname ${COMP_WORDS[0]})"/..; pwd)
COMPREPLY=()
# cword: cursor index; skip: number of '-' options before the cursor.
local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]} card=${#COMP_WORDS[@]} i w skip=0
for ((i=1 ; i<cword; i++)) ; do
w="${COMP_WORDS[i]}"
[[ "${w}" == -* ]] && ((skip++))
done
# cmd/opt: first two non-option words; names: the rest.
local arg_pos w i cmd= opt= names=
((arg_pos = cword - skip))
for ((i=1 ; i<card; i++)); do
w="${COMP_WORDS[i]}"
if [ -z "${cmd}" ]; then
[[ "${w}" == -* ]] || cmd="${w}"
continue
fi
if [ -z "${opt}" ]; then
[[ "${w}" == -* ]] || opt="${w}"
continue
fi
names="${names} ${w}"
done
#(echo "A cword:${cword} / arg_pos:${arg_pos} / card:${card} / cur:${cur} / cmd:${cmd} / opt:${opt} / names:${names} " >> /dev/pts/1)
case "${cur}" in
-*)
COMPREPLY=($(compgen -W "-h --help" -- "${cur}"))
;;
*)
local cmd_available="compose service"
local opt_available="available validate enable disable status"
case "${arg_pos}" in
1)
# $1 of kazList.sh
COMPREPLY=($(compgen -W "${cmd_available}" -- "${cur}"))
;;
2)
# $2 of kazList.sh
COMPREPLY=($(compgen -W "${opt_available}" -- "${cur}"))
;;
*)
# $3-* of kazList.sh
[[ " ${cmd_available} " =~ " ${cmd} " ]] || return 0
# select set of names
local names_set="${opt}"
local available_args
case "${cmd}" in
service)
case "${names_set}" in
available|validate)
return 0
;;
*)
available_args=$("${COMP_WORDS[0]}" "compose" "enable" "orga" 2>/dev/null)
;;
esac
;;
compose)
case "${names_set}" in
validate|enable|disable)
;;
*)
names_set="available"
;;
esac
available_args=$("${COMP_WORDS[0]}" "${cmd}" "${names_set}")
;;
esac
# remove previous selected target
local proposal item
for item in ${available_args} ; do
[[ " ${names} " =~ " ${item} " ]] || proposal="${proposal} ${item}"
done
COMPREPLY=($(compgen -W "${proposal}" -- "${cur}"))
;;
esac
esac
return 0
}
complete -F _kazList_completions kazList.sh

View File

@@ -0,0 +1,39 @@
#/usr/bin/env bash
# NOTE(review): the line above is missing the '!' of a shebang; harmless
# when the file is only sourced for completion, but worth fixing.
# Bash completion for mvOrga2Nas.sh: completes options (-h -n) and the
# enabled orga names not already given on the command line.
_mv_orga_nas_completion () {
KAZ_ROOT=$(cd "$(dirname ${COMP_WORDS[0]})"/..; pwd)
COMPREPLY=()
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]} card=${#COMP_WORDS[@]} i w skip=0
for ((i=1 ; i<cword; i++)) ; do
w="${COMP_WORDS[i]}"
[[ "${w}" == -* ]] && ((skip++))
done
# names: every non-option word already typed
local arg_pos w i names=
((arg_pos = cword - skip))
for ((i=1 ; i<card; i++)) ; do
w="${COMP_WORDS[i]}"
if [[ "${w}" == -* ]]; then
continue
fi
names="${names} ${w}"
done
local KAZ_LIST="${KAZ_BIN_DIR}/kazList.sh"
case "$cur" in
-*)
local proposal="-h -n"
COMPREPLY=( $(compgen -W "${proposal}" -- "${cur}" ) )
;;
*)
# enabled orgas (with the '-orga' suffix stripped)
local available_orga=$("${KAZ_LIST}" "compose" "enable" "orga" 2>/dev/null | sed "s/-orga\b//g")
local proposal= item
for item in ${available_orga} ; do
[[ " ${names} " =~ " ${item} " ]] || proposal="${proposal} ${item}"
done
COMPREPLY=($(compgen -W "${proposal}" -- "${cur}"))
;;
esac
return 0
}
complete -F _mv_orga_nas_completion mvOrga2Nas.sh

63
bin2/.orga-gen-completion.bash Executable file
View File

@@ -0,0 +1,63 @@
#/usr/bin/env bash
# NOTE(review): the line above is missing the '!' of a shebang; harmless
# when the file is only sourced for completion, but worth fixing.
# Bash completion for orga-gen.sh: '-service' removes a service,
# '+service' adds one (lists come from kazList.sh); a bare word selects
# the target orga, but only when invoked from the orga template dir.
_orga_gen_completion () {
KAZ_ROOT=$(cd "$(dirname ${COMP_WORDS[0]})"/../..; pwd)
ORGA_DIR=$(cd "$(dirname ${COMP_WORDS[0]})"; basename $(pwd))
COMPREPLY=()
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# skip counts both '-' and '+' words before the cursor.
local cword=${COMP_CWORD} cur=${COMP_WORDS[COMP_CWORD]} card=${#COMP_WORDS[@]} i w skip=0
for ((i=1 ; i<cword; i++)) ; do
w="${COMP_WORDS[i]}"
[[ "${w}" == -* ]] && ((skip++))
[[ "${w}" == +* ]] && ((skip++))
done
# addOpt/rmOpt: '+'/'-' words already typed; names: bare words (orgas).
local arg_pos w i addOpt= rmOpt= names=
((arg_pos = cword - skip))
for ((i=1 ; i<card; i++)) ; do
w="${COMP_WORDS[i]}"
if [[ "${w}" == -* ]]; then
rmOpt="${rmOpt} ${w}"
continue
fi
if [[ "${w}" == '+'* ]]; then
addOpt="${addOpt} ${w}"
continue
fi
names="${names} ${w}"
done
local KAZ_LIST="${KAZ_BIN_DIR}/kazList.sh"
case "$cur" in
-*)
# propose removable services: the ones currently enabled
local available_services item proposal="-h -l" listOpt="available"
[ -n "${names}" ] && listOpt="enable ${names}"
[[ "${ORGA_DIR}" = "orgaTmpl" ]] || listOpt="enable ${ORGA_DIR%-orga}"
available_services=$("${KAZ_LIST}" service ${listOpt} 2>/dev/null | tr ' ' '\n' | sed "s/\(..*\)/-\1/")
for item in ${available_services} ; do
[[ " ${rmOpt} " =~ " ${item} " ]] || proposal="${proposal} ${item}"
done
COMPREPLY=( $(compgen -W "${proposal}" -- "${cur}" ) )
;;
'+'*)
# propose addable services: the ones currently disabled
local available_services item proposal= listOpt="available"
[ -n "${names}" ] && listOpt="disable ${names}"
[[ "${ORGA_DIR}" = "orgaTmpl" ]] || listOpt="disable ${ORGA_DIR%-orga}"
available_services=$("${KAZ_LIST}" service ${listOpt} 2>/dev/null | tr ' ' '\n' | sed "s/\(..*\)/+\1/")
for item in ${available_services} ; do
[[ " ${addOpt} " =~ " ${item} " ]] || proposal="${proposal} ${item}"
done
COMPREPLY=( $(compgen -W "${proposal}" -- "${cur}" ) )
;;
*)
# orga name: only offered from the template directory
[[ "${ORGA_DIR}" = "orgaTmpl" ]] || return 0;
local available_orga=$("${KAZ_LIST}" "compose" "enable" "orga" 2>/dev/null | sed "s/-orga\b//g")
local proposal= item
for item in ${available_orga} ; do
[[ " ${names} " =~ " ${item} " ]] || proposal="${proposal} ${item}"
done
COMPREPLY=($(compgen -W "${proposal}" -- "${cur}"))
;;
esac
return 0
}
complete -F _orga_gen_completion orga-gen.sh

View File

@@ -0,0 +1,17 @@
#!/bin/bash
KAZ_ROOT=/kaz
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# Bash completion for scriptBorg.sh: proposes its option flags when the
# current word starts with '-'.
_borg()
{
    local word=${COMP_WORDS[COMP_CWORD]}
    if [[ "${word}" == -* ]]; then
        local_prop="-h -d -i -l -m -u -t -p -v -info"
        COMPREPLY=( $(compgen -W "${local_prop}" -- "${word}") )
    fi
}
complete -F _borg scriptBorg.sh

View File

@@ -0,0 +1,11 @@
#!/usr/bin/env bash
# Bash completion for updateLook.sh: proposes the theme directories
# found in the 'look' directory next to the script.
# Fix: shebang was missing '!'.
_update_look_nas_completion () {
    COMPREPLY=()
    local cur=${COMP_WORDS[COMP_CWORD]}
    # A theme is any sub-directory of look/ ('ls -F' suffixes dirs with '/').
    local THEMES=$(cd "$(dirname ${COMP_WORDS[0]})"/look ; ls -F -- '.' | grep '/$' | sed 's%/%%' | tr '\n' ' ' | sed 's% $%%')
    COMPREPLY=($(compgen -W "${THEMES}" -- "${cur}"))
    return 0
}
complete -F _update_look_nas_completion updateLook.sh

102
bin2/applyTemplate.sh Executable file
View File

@@ -0,0 +1,102 @@
#!/bin/bash
# Update the configuration ${CONF} from the template ${TMPL}.
# Substituted variables:
# - __DOMAIN__ (and every other __*__ placeholder listed below)
# Blocks can be shown or hidden:
#   the start of a block is a line containing {{XXX
#   the end of a block is a line containing }}
#   display depends on XXX:
#     XXX = on      => always shown
#     XXX = off     => always hidden
#     XXX = compose => shown when the environment variable proxy_compose is "on"
#
# BUG FIX: the sed invocation previously contained "commented-out"
# '# -e …' lines in the middle of its backslash-continued argument
# list. A '#' inside a continued command line does NOT start a comment:
# it was glued onto the previous expression (making it invalid, e.g.
# "s|…|g#") and the supposedly disabled substitutions were still passed
# to sed. Those lines have been removed from the chain.
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
usage () {
    echo $(basename "$0") " [-h] [-help] [-timestamp] template dst"
    echo " -h"
    echo " -help Display this help."
    echo " -timestamp produce timestamp comment."
}
TIMESTAMP=""
case "$1" in
    '-h' | '-help' )
        usage
        shift
        exit;;
    '-time' | '-timestamp' )
        TIMESTAMP=YES
        shift;;
esac
# no more export in .env: re-export every proxy_* variable so the awk
# filter below can read them through ENVIRON[].
PROXY_VARS=$(set | grep "proxy_.*=")
for var in ${PROXY_VARS}
do
    export ${var}
done
(
    # $1 = template
    # $2 = target
    if [ "${TIMESTAMP}" == "YES" ]; then
        echo "# Generated by $(pwd)$(basename $0)"
        echo "# à partir du modèle $1"
        echo "#" $(date "+%x %X")
        echo
    fi
    # Drop blank/comment lines, substitute the placeholders, then let awk
    # keep or drop the {{…}} blocks according to the proxy_* variables.
    sed \
        -e "/^[ \t]*$/d"\
        -e "/^[ ]*#.*$/d"\
        -e "s|__CACHET_HOST__|${cachetHost}|g"\
        -e "s|__CALC_HOST__|${calcHost}|g"\
        -e "s|__CLOUD_HOST__|${cloudHost}|g"\
        -e "s|__DATE_HOST__|${dateHost}|g"\
        -e "s|__DOKUWIKI_HOST__|${dokuwikiHost}|g"\
        -e "s|__DOMAIN__|${domain}|g"\
        -e "s|__FILE_HOST__|${fileHost}|g"\
        -e "s|__PAHEKO_HOST__|${pahekoHost}|g"\
        -e "s|__GIT_HOST__|${gitHost}|g"\
        -e "s|__GRAV_HOST__|${gravHost}|g"\
        -e "s|__HTTP_PROTO__|${httpProto}|g"\
        -e "s|__LDAP_HOST__|${ldapHost}|g"\
        -e "s|__LDAPUI_HOST__|${ldapUIHost}|g"\
        -e "s|__MATTER_HOST__|${matterHost}|g"\
        -e "s|__OFFICE_HOST__|${officeHost}|g"\
        -e "s|__PAD_HOST__|${padHost}|g"\
        -e "s|__QUOTAS_HOST__|${quotasHost}|g"\
        -e "s|__SMTP_HOST__|${smtpHost}|g"\
        -e "s|__SYMPADB__|${sympaDBName}|g"\
        -e "s|__SYMPA_HOST__|${sympaHost}|g"\
        -e "s|__VIGILO_HOST__|${vigiloHost}|g"\
        -e "s|__WEBMAIL_HOST__|${webmailHost}|g"\
        -e "s|__CASTOPOD_HOST__|${castopodHost}|g"\
        -e "s|__SPIP_HOST__|${spipHost}|g"\
        -e "s|__IMAPSYNC_HOST__|${imapsyncHost}|g"\
        -e "s|__YAKFORMS_HOST__|${yakformsHost}|g"\
        -e "s|__WORDPRESS_HOST__|${wordpressHost}|g"\
        -e "s|__MOBILIZON_HOST__|${mobilizonHost}|g"\
        -e "s|__API_HOST__|${apiHost}|g"\
        -e "s|__VAULTWARDEN_HOST__|${vaultwardenHost}|g"\
        -e "s|__DOMAIN_SYMPA__|${domain_sympa}|g"\
        "$1" | awk '
    BEGIN {cp=1}
    /}}/ {cp=1 ; next};
    /{{on/ {cp=1; next};
    /{{off/ {cp=0; next};
    match($0, /{{[a-zA-Z0-9_]+/) {cp=(ENVIRON["proxy_" substr($0,RSTART+2,RLENGTH)] == "on"); next};
    {if (cp) print $0};'
) > "$2"

25
bin2/certbot-dns-alwaysdata.sh Executable file
View File

@@ -0,0 +1,25 @@
#/bin/bash
# NOTE(review): the line above is missing the '!' of a shebang.
# DNS-01 hook for certbot using the alwaysdata API: creates (auth) or
# deletes (cleanup) the _acme-challenge TXT record for CERTBOT_DOMAIN.
# certbot certonly --manual --preferred-challenges=dns --manual-auth-hook certbot-dns-alwaysdata.sh --manual-cleanup-hook certbot-dns-alwaysdata.sh -d "*.kaz.bzh" -d "kaz.bzh"
export KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# ALWAYSDATA_TOKEN / ALWAYSDATA_ACCOUNT / ALWAYSDATA_API come from here.
. $KAZ_KEY_DIR/env-alwaysdata
# numeric id of the domain to edit
DOMAIN_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" ${ALWAYSDATA_API}/domain/?name=${CERTBOT_DOMAIN} | jq '.[0].id')
add_record(){
# create the _acme-challenge TXT record carrying CERTBOT_VALIDATION
RECORD_ID=$(curl -s -X POST -d "{\"domain\":\"${DOMAIN_ID}\", \"type\":\"TXT\", \"name\":\"_acme-challenge\", \"value\":\"${CERTBOT_VALIDATION}\"}" --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/")
}
del_record(){
# look up then delete the first _acme-challenge TXT record of the domain
RECORD_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?name=_acme-challenge&type=TXT&domain=${DOMAIN_ID}" | jq ".[0].id")
curl -s -X DELETE --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/${RECORD_ID}/"
}
# presumably certbot sets CERTBOT_AUTH_OUTPUT only when calling the
# cleanup hook, hence: absent => create, present => delete. Confirm
# against the certbot hook documentation.
if [ -z ${CERTBOT_AUTH_OUTPUT} ]; then
add_record
else
del_record
fi

226
bin2/checkEnvFiles.sh Executable file
View File

@@ -0,0 +1,226 @@
#!/bin/bash
# Interactive sanity checker for the secret/ and secret.tmpl/ env files:
# reports missing files/variables, clear-text passwords and entries
# absent from dockers.env, offering to fix each finding in an editor.
export KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
RUN_PASS_DIR="secret"
TMPL_PASS_DIR="secret.tmpl"
NEED_GEN=
########################################
usage () {
echo "Usage: $0 [-n] [-h]"
echo " -h help"
exit 1
}
case "$1" in
'-h' | '-help' )
usage
;;
esac
[ "$#" -eq 0 ] || usage
########################################
# check system
for prg in kompare; do
if ! type "${prg}" > /dev/null; then
printKazError "$0 need ${prg}"
echo "please run \"apt-get install ${prg}\""
exit
fi
done
cd "${KAZ_ROOT}"
########################################
# get lvalues in script
getVars () {
    # $1 : filename
    # Print every assigned variable name in $1, sorted, duplicates removed.
    grep "^[^#]*=" "$1" | sed 's/\([^=]*\).*/\1/' | sort -u
}
getSettedVars () {
    # $1 : filename
    # Print credential-looking assignments of $1 whose value is already
    # set in clear text (i.e. not a @@placeholder@@ to be generated).
    # Fix: previously grepped ./* (every file in the cwd) instead of "$1".
    grep -E "^[^=#]*(USER|PASS|TOKEN|DATABASE|ACCOUNT|LOGIN|KEY)[^#]*=..*" "$1" | grep -vE '^[^#=]*=.*@@(user|pass|db|token|gv|cv)@@.*' | sort -u
}
getUnsettedVars () {
    # $1 : filename
    # Print the lines of $1 that are not @@placeholder@@ assignments.
    # Fix: previously grepped ./* (every file in the cwd) instead of "$1".
    grep -vE '^[^#=]*=.*@@(user|pass|db|token|gv|cv)@@.*' "$1" | sort -u
}
getVarFormVal () {
    # $1 searched value
    # $2 filename
    # Print the variable name whose assigned value is $1 in file $2.
    grep "^[^#]*=$1" "$2" | sed 's/\s*\([^=]*\).*/\1/'
}
########################################
# check new files env-*
createMissingEnv () {
# $1 : ref dir
# $2 : target dir
REF_DIR="$1"
TARGET_DIR="$2"
NEED_UPDATE=
declare -a listRef listTarget missing
listRef=($(cd "${REF_DIR}"; ls -1 env-* | grep -v '~$'))
listTarget=($(cd "${TARGET_DIR}"; ls -1 env-* | grep -v '~$'))
missing=($(comm -23 <(printf "%s\n" ${listRef[@]}) <(printf "%s\n" ${listTarget[@]})))
for envFile in ${missing[@]}; do
read -p "Do you want to create ${GREEN}${BOLD}${TARGET_DIR}/${envFile}${NC}? [y/n]: " yn
case $yn in
""|[Yy]*)
cp "${REF_DIR}/${envFile}" "${TARGET_DIR}/${envFile}"
NEED_UPDATE=true
;;
esac
done
}
createMissingEnv "${RUN_PASS_DIR}" "${TMPL_PASS_DIR}"
[ -n "${NEED_UPDATE}" ] && NEED_GEN=true
createMissingEnv "${TMPL_PASS_DIR}" "${RUN_PASS_DIR}"
[ -n "${NEED_UPDATE}" ] && NEED_GEN=true
########################################
# check missing values in env-* between RUN and TMPL
# For every env-* file present in both trees, compare the variable sets
# and offer to edit both files until they agree.
declare -a listTmpl listRun listCommonFiles
listTmplFiles=($(cd "${TMPL_PASS_DIR}"; ls -1 env-* | grep -v '~$'))
listRunFiles=($(cd "${RUN_PASS_DIR}"; ls -1 env-* | grep -v '~$'))
listCommonFiles=($(comm -12 <(printf "%s\n" ${listTmplFiles[@]}) <(printf "%s\n" ${listRunFiles[@]})))
for envFile in ${listCommonFiles[@]}; do
while : ; do
TMPL_FILE="${TMPL_PASS_DIR}/${envFile}"
RUN_FILE="${RUN_PASS_DIR}/${envFile}"
declare -a listRef list2Target missingInRun missingInTmpl
listTmplVars=($(getVars "${TMPL_FILE}"))
listRunVars=($(getVars "${RUN_FILE}"))
# NOTE(review): comm -23 yields vars unique to the TMPL file (i.e. the
# ones absent from RUN) yet is stored in 'missingInTmpl', and vice
# versa — the two labels/messages below look swapped; verify intent.
missingInTmpl=($(comm -23 <(printf "%s\n" ${listTmplVars[@]}) <(printf "%s\n" ${listRunVars[@]})))
missingInRun=($(comm -13 <(printf "%s\n" ${listTmplVars[@]}) <(printf "%s\n" ${listRunVars[@]})))
if [ -n "${missingInTmpl}" ] || [ -n "${missingInRun}" ]; then
[ -n "${missingInTmpl}" ] &&
echo "missing vars in ${YELLOW}${BOLD}${TMPL_FILE}${NC}:${RED}${BOLD}" ${missingInTmpl[@]} "${NC}"
[ -n "${missingInRun}" ] &&
echo "missing vars in ${YELLOW}${BOLD}${RUN_FILE}${NC}:${RED}${BOLD}" ${missingInRun[@]} "${NC}"
read -p "Do you want to add them? [y/n]: " yn
case $yn in
""|[Yy]*)
emacs "${TMPL_FILE}" "${RUN_FILE}"
[ -n "${missingInTmpl}" ] && NEED_GEN=true
break
;;
[Nn]*)
break
;;
esac
else
break
fi
done
done
########################################
# check empty pass in env-*
# Report template files still containing clear-text credentials and
# offer to open them in an editor to clear them.
for envFile in $(ls -1 "${TMPL_PASS_DIR}/"env-* | grep -v '~$'); do
settedVars=($(getSettedVars "${envFile}"))
if [ -n "${settedVars}" ]; then
echo "unclear password in ${GREEN}${BOLD}${envFile}${NC}:${BLUE}${BOLD}"
for var in ${settedVars[@]}; do
echo -e "\t${var}"
done
echo "${NC}"
read -p "Do you want to clear them? [y/n]: " yn
case $yn in
""|[Yy]*)
emacs "${envFile}"
;;
esac
fi
done
########################################
# check extention in dockers.env
# Every env-<name> file must have a variable in dockers.env whose value
# is <name>; list the ones that do not and offer to edit dockers.env.
declare -a missing
missing=($(for DIR in "${RUN_PASS_DIR}" "${TMPL_PASS_DIR}"; do
for envFile in $(ls -1 "${DIR}/"env-* | grep -v '~$'); do
val="${envFile#*env-}"
varName=$(getVarFormVal "${val}" "${DOCKERS_ENV}")
if [ -z "${varName}" ]; then
echo "${val}"
fi
done
done | sort -u))
if [ -n "${missing}" ]; then
echo "missing def in ${GREEN}${BOLD}${DOCKERS_ENV}${NC}:${BLUE}${BOLD}"
for var in ${missing[@]}; do
echo -e "\t${var}"
done
echo "${NC}"
read -p "Do you want to add them? [y/n]: " yn
case $yn in
""|[Yy]*)
emacs "${DOCKERS_ENV}"
;;
esac
fi
########################################
# check extention in dockers.env
# NOTE(review): this second section fills 'unsetted' but then tests and
# loops on 'missing' from the section above, so it merely re-reports the
# previous results — it looks like an unfinished copy/paste. Confirm the
# intent before changing it.
declare -a missing
unsetted=($(for DIR in "${RUN_PASS_DIR}"; do
for envFile in $(ls -1 "${DIR}/"env-* | grep -v '~$'); do
val="${envFile#*env-}"
varName=$(getVarFormVal "${val}" "${DOCKERS_ENV}")
if [ -z "${varName}" ]; then
echo "${val}"
fi
done
done | sort -u))
if [ -n "${missing}" ]; then
echo "missing def in ${GREEN}${BOLD}${DOCKERS_ENV}${NC}:${BLUE}${BOLD}"
for var in ${missing[@]}; do
echo -e "\t${var}"
done
echo "${NC}"
read -p "Do you want to add them? [y/n]: " yn
case $yn in
""|[Yy]*)
emacs "${DOCKERS_ENV}"
;;
esac
fi
# Finally, offer to generate values for any @@placeholder@@ introduced above.
if [ -n "${NEED_GEN}" ]; then
while : ; do
read -p "Do you want to generate missing values? [y/n]: " yn
case $yn in
""|[Yy]*)
"${KAZ_BIN_DIR}/secretGen.sh"
break
;;
[Nn]*)
break
;;
esac
done
fi
# XXX config/dockers.tmpl.env
# XXX ! verify init for dockers.env

48
bin2/checkPahekoLdap.py Executable file
View File

@@ -0,0 +1,48 @@
#!/usr/bin/python3
# Consistency check between Paheko members and the LDAP directory.
import sys
from lib.paheko import Paheko
from lib.ldap import Ldap
paheko = Paheko()
# Resolve the "Membres" category and fetch every member belonging to it.
categorie_membres = paheko.get_categorie_id("Membres")
membres = paheko.get_users_in_categorie(categorie_membres)
def test_quota(paheko_entry, ldap_entry):
ok = True
quota_disque = paheko_entry["quota_disque"].strip("'")
if f"{quota_disque}G".encode() != ldap_entry[1]['mailQuota'][0]:
ok = False
return ok
def test_mail_secours(paheko_entry, ldap_entry):
try:
if paheko_entry["email_secours"]:
return paheko_entry["email_secours"].strip("'").encode() == ldap_entry[1]['mailDeSecours'][0]
else:
return False
except e:
print(paheko_entry)
print(ldap_entry)
raise e
# Compare every Paheko member with its LDAP entry and print mismatches.
with Ldap() as ldap:
    try:
        for membre in membres:
            ldap_entry = ldap.get_email(membre["email"])[0]
            ok = True
            #ok &= test_quota(membre, ldap_entry)
            ok &= test_mail_secours(membre, ldap_entry)
            if not ok:
                print(membre)
                print(ldap_entry)
                print()
    except Exception as e:
        # on unexpected data, show the offending member before re-raising
        print(membre)
        print(ldap.get_email(membre["email"]))
        raise e

30
bin2/cleanDepot.sh Executable file
View File

@@ -0,0 +1,30 @@
#!/bin/bash
# Purge 7z payloads from the Jirafeau data volume: for each link file,
# locate its content file, delete it, prune now-empty directories, then
# remove the link itself.  Set SIMU=echo for a dry run.
#SIMU=echo
cd /var/lib/docker/volumes/jirafeau_fileData/_data
find links/ -type f -print | while read link ; do
    # first line of a link file is the original upload name
    name=$(head -1 "${link}")
    #if [[ -z $(head -1 "${link}" | grep "7z$") ]]; then
    if [[ -z $(head -9 "${link}" | tail -1) ]]; then
        # not a 7z: skip it
        continue;
    fi
    # look up the content file reference (6th line of the link file)
    filename=$(head -6 "${link}" | tail -1)
    # Jirafeau shards the content path on 8-character slices of the name
    l1=$(echo $filename | cut -c 1-8)
    l2=$(echo $filename | cut -c 9-16)
    l3=$(echo $filename | cut -c 17-24)
    l4=$(echo $filename | cut -c 25-32)
    # delete the content file
    # NOTE(review): "$(unknown)" invokes a non-existent command and expands
    # to nothing, so this rm targets the directory itself and fails
    # silently — the intended leaf name (presumably derived from
    # ${filename}) must be restored.  TODO confirm against Jirafeau's
    # on-disk layout.
    ${SIMU} rm -f "files/${l1}/${l2}/${l3}/${l4}/$(unknown)"
    # prune the now-empty parent directories
    ${SIMU} rmdir -p "files/${l1}/${l2}/${l3}/${l4}" 2>/dev/null
    # delete the link
    ${SIMU} rm -f "${link}"
    # log
    echo "$(date +%d-%m-%Y-%H-%M-%S) Find ${link} <${name}>"
done

24
bin2/configKaz.sh Executable file
View File

@@ -0,0 +1,24 @@
#!/bin/sh -e
# Debconf "config" script for the kaz package: asks (or re-asks) the
# kaz/mode and kaz/domain questions.
. /usr/share/debconf/confmodule
db_version 2.0
# "fset" argument: mark both questions as unseen so they are asked again.
if [ "$1" = "fset" ]; then
    db_fset kaz/mode seen false
    db_fset kaz/domain seen false
    db_go
fi
# "reset" argument: discard any stored answers.
if [ "$1" = "reset" ]; then
    db_reset kaz/mode
    db_reset kaz/domain
    db_go
fi
#db_set kaz/domain test
# NOTE(review): placeholder dialog title — confirm intended wording
db_title "a b c"
db_input critical kaz/mode
db_input critical kaz/domain
db_go

11
bin2/configKaz.sh.templates Executable file
View File

@@ -0,0 +1,11 @@
Template: kaz/mode
Type: select
Choices: prod, dev, local
Default: local
Description: Mode
Template: kaz/domain
Type: string
Description: domain name
Default: kaz.bzh

356
bin2/container.sh Executable file
View File

@@ -0,0 +1,356 @@
#!/bin/bash
#Who: François
#When: 2021
#What: docker compose management for KAZ
# 15/01/2025: last change by fab: do not restart Traefik when creating an orga
# Did : 13 Feb 2025 reworked the postgres and mysql saves
# Did : added the postgres saves of mobilizon and mattermost
# 20/04/2025
# Did : added the peertube saves to the general services
# If postfix is absent, first run:
#    docker network create postfix_mailNet
# Starts/stops a compose,
# saves a compose's database,
# updates the reverse-proxy (mandataire) configuration parameters.
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
cd "${KAZ_BIN_DIR}"
# destination directory for database dumps
PATH_SAUVE="/home/sauve/"
export SIMU=""
# Compose inventories read from the config lists; orga composes are
# folded into the mail-dependent list.
declare -a availableComposesNoNeedMail availableMailComposes availableComposesNeedMail availableProxyComposes availableOrga
availableComposesNoNeedMail=($(getList "${KAZ_CONF_DIR}/container-withoutMail.list"))
availableMailComposes=($(getList "${KAZ_CONF_DIR}/container-mail.list"))
availableComposesNeedMail=($(getList "${KAZ_CONF_DIR}/container-withMail.list"))
availableProxyComposes=($(getList "${KAZ_CONF_DIR}/container-proxy.list"))
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
availableComposesNeedMail+=( "${availableOrga[@]}" )
# union of everything this script knows how to drive
knownedComposes+=( ${availableMailComposes[@]} )
knownedComposes+=( ${availableProxyComposes[@]} )
knownedComposes+=( ${availableComposesNoNeedMail[@]} )
knownedComposes+=( ${availableComposesNeedMail[@]} )
# Print the command-line help on stdout and abort with status 1.
usage () {
    cat <<EOF
Usage: $0 [-n] {status|start|stop|save} [compose]...
 -n : simulation
 status : docker-compose status (default all compose available)
 start : start composes (default all compose validate)
 stop : stop composes (default all compose enable)
 save : save all known database
 [compose] : in ${knownedComposes[@]}
EOF
    exit 1
}
# Run one docker-compose action inside a compose directory.
#   $1 - docker-compose arguments ("up -d" or "down")
#   $2 - compose directory name under KAZ_COMP_DIR
# Honours SIMU ("echo" for dry runs); links the shared .env when absent.
doCompose () {
    local action="$1" name="$2"
    echo "compose: ${action} ${name}"
    ${SIMU} cd "${KAZ_COMP_DIR}/${name}"
    if [ ! -h .env ] ; then
        echo "create .env in ${name}"
        ${SIMU} ln -fs ../../config/dockers.env .env
    fi
    ${SIMU} docker-compose ${action}
}
# Apply one docker-compose action to several compose directories.
#   $1  - docker-compose arguments ("up -d" or "down")
#   $2+ - compose directory names
doComposes () {
    cmd=$1
    shift
    for dir in $@ ; do
        doCompose "${cmd}" ${dir}
    done
}
# Record the proxy state of composes in DOCKERS_ENV and regenerate the
# reverse-proxy configuration.
#   $1  - "on" or "off"
#   $2+ - compose directory names
updateProxy () {
    cmd=$1
    shift
    echo "update proxy ${cmd}: $@"
    date=$(date "+%x %X")
    for compose in $@ ; do
        # env flags use "_" where compose names use "-"
        composeFlag=${compose//-/_}
        entry="proxy_${composeFlag}="
        # each flag line carries an audit trail (script name + timestamp)
        newline="${entry}${cmd} # update by $(basename $0) at ${date}"
        if ! grep -q "proxy_${composeFlag}=" "${DOCKERS_ENV}" 2> /dev/null ; then
            # flag not present yet: append it
            if [[ -n "${SIMU}" ]] ; then
                echo "${newline} >> ${DOCKERS_ENV}"
            else
                echo "${newline}" >> "${DOCKERS_ENV}"
            fi
        else
            # flag already present: rewrite it in place
            ${SIMU} sed -i \
                -e "s|${entry}.*|${newline}|g" \
                "${DOCKERS_ENV}"
        fi
    done
    # regenerate every proxy front-end configuration
    for item in "${availableProxyComposes[@]}"; do
        ${SIMU} ${KAZ_COMP_DIR}/${item}/proxy-gen.sh
    done
}
# Dump one service database into PATH_SAUVE as a gzipped SQL file.
#   $1 containerName : docker container hosting the DB
#   $2 userName      : DB user
#   $3 userPass      : DB password
#   $4 dbName        : database to dump (used by the mysql branches)
#   $5 backName      : base name of the resulting dump file
#   $6 backDbType    : "mysql" or "postgres"
saveDB () {
    containerName=$1
    userName=$2
    userPass=$3
    dbName=$4
    backName=$5
    backDbType=$6
    # Both mysqldump (v=10.5) and mariadb-dump (v>=11.4) are attempted, plus
    # pg_dumpall for postgres, to be certain of getting a dump; depending on
    # the engine/version one of the 3 lines will simply fail.
    # backDbType selects between the mysql and postgres branches.
    if [[ -n "${SIMU}" ]] ; then
        ${SIMU} "[ ${backDbType} = mysql ] && docker exec ${containerName} mysqldump --user=${userName} --password=${userPass} ${dbName} | gzip > $PATH_SAUVE${backName}.sql.gz"
        ${SIMU} "[ ${backDbType} = mysql ] && docker exec ${containerName} mariadb-dump --user=${userName} --password=${userPass} ${dbName} | gzip > $PATH_SAUVE${backName}.sql.gz"
        ${SIMU} "[ ${backDbType} = postgres ] && docker exec ${containerName} pg_dumpall --username=${userName} | gzip >${PATH_SAUVE}/${backName}.pgdump.sql.gz"
    else
        [ ${backDbType} = mysql ] && docker exec ${containerName} mysqldump --user=${userName} --password=${userPass} ${dbName} | gzip > $PATH_SAUVE${backName}.sql.gz
        [ ${backDbType} = mysql ] && docker exec ${containerName} mariadb-dump --user=${userName} --password=${userPass} ${dbName} | gzip > $PATH_SAUVE${backName}.sql.gz
        [ ${backDbType} = postgres ] && docker exec ${containerName} pg_dumpall --username=${userName} | gzip >${PATH_SAUVE}/${backName}.pgdump.sql.gz
    fi
}
# Composes actually selected for this invocation (filled from the CLI
# arguments below, or defaulted to every available compose).
declare -a enableComposesNoNeedMail enableMailComposes enableComposesNeedMail enableProxyComposes
enableComposesNoNeedMail=()
enableMailComposes=()
enableComposesNeedMail=()
enableProxyComposes=()
# Start every selected compose: create the docker networks, bring services
# up (mail-independent first), flag them "on" for the proxy, reload the
# proxy front-ends and regenerate the web pages when proxy_web is on.
startComposes () {
    ./kazDockerNet.sh add ${enableComposesNoNeedMail[@]} ${enableProxyComposes[@]} ${enableMailComposes[@]} ${enableComposesNeedMail[@]}
    # mail-dependent composes need the postfix network even when postfix itself is not selected
    [ ${#enableComposesNeedMail[@]} -ne 0 ] && [[ ! "${enableMailComposes[@]}" =~ "postfix" ]] && ./kazDockerNet.sh add postfix
    [[ "${enableComposesNeedMail[@]}" =~ "paheko" ]] && ${SIMU} ${KAZ_COMP_DIR}/paheko/paheko-gen.sh
    doComposes "up -d" ${enableComposesNoNeedMail[@]}
    doComposes "up -d" ${enableMailComposes[@]}
    doComposes "up -d" ${enableComposesNeedMail[@]}
    updateProxy "on" ${enableComposesNoNeedMail[@]} ${enableComposesNeedMail[@]}
    #fab 15/01/25: the proxy is no longer restarted by container.sh
    #doComposes "up -d" ${enableProxyComposes[@]}
    for item in "${enableProxyComposes[@]}"; do
        [[ -x "${KAZ_COMP_DIR}/${item}/reload.sh" ]] && ${SIMU} "${KAZ_COMP_DIR}/${item}/reload.sh"
    done
    # BUG FIX: the pattern was "^.s*proxy_web.s*=.s*on" (literal dot + "s"),
    # which never matches "proxy_web=on" at the start of a line, so
    # web-gen.sh was never run.  Use a real whitespace class instead.
    if grep -q "^[[:space:]]*proxy_web[[:space:]]*=[[:space:]]*on" "${DOCKERS_ENV}" 2> /dev/null ; then
        ${SIMU} ${KAZ_COMP_DIR}/web/web-gen.sh
    fi
}
# Stop every selected compose (reverse order of start), flag them "off"
# for the proxy and regenerate the web pages when proxy_web stays on.
stopComposes () {
    updateProxy "off" ${enableComposesNoNeedMail[@]} ${enableComposesNeedMail[@]}
    doComposes "down" ${enableProxyComposes[@]}
    doComposes "down" ${enableComposesNeedMail[@]}
    doComposes "down" ${enableMailComposes[@]}
    doComposes "down" ${enableComposesNoNeedMail[@]}
    # BUG FIX: same broken pattern as startComposes ("^.s*proxy_web.s*=.s*on")
    # which never matched, so web-gen.sh was never triggered on stop.
    if grep -q "^[[:space:]]*proxy_web[[:space:]]*=[[:space:]]*on" "${DOCKERS_ENV}" 2> /dev/null ; then
        ${SIMU} ${KAZ_COMP_DIR}/web/web-gen.sh
    fi
}
# Show the docker status of every selected compose through kazList.sh.
statusComposes () {
    ${KAZ_ROOT}/bin/kazList.sh compose status ${enableMailComposes[@]} ${enableProxyComposes[@]} ${enableComposesNoNeedMail[@]} ${enableComposesNeedMail[@]}
}
# Dump the database of every selected compose into PATH_SAUVE via saveDB.
# Composes whose state is plain files (jirafeau, web, paheko, dokuwiki)
# have nothing to dump here.
saveComposes () {
    . "${DOCKERS_ENV}"
    savedComposes+=( ${enableMailComposes[@]} )
    savedComposes+=( ${enableProxyComposes[@]} )
    savedComposes+=( ${enableComposesNoNeedMail[@]} )
    savedComposes+=( ${enableComposesNeedMail[@]} )
    for compose in ${savedComposes[@]}
    do
        case "${compose}" in
            jirafeau)
                # nothing to do (plain files)
                ;;
            ethercalc)
                # useless: backing up /var/lib/docker/volumes/ethercalc_calcDB/_data/dump.rdb is enough
                ;;
            sympa)
                echo "save sympa"
                . $KAZ_KEY_DIR/env-sympaDB
                saveDB ${sympaDBName} "${DB_MYSQL_USER}" "${DB_MYSQL_PASSWORD}" "${DB_MYSQL_DATABASE}" sympa mysql
                ;;
            web)
                # nothing to do (plain files)
                ;;
            etherpad)
                echo "save pad"
                . $KAZ_KEY_DIR/env-etherpadDB
                saveDB ${etherpadDBName} "${DB_MYSQL_USER}" "${DB_MYSQL_PASSWORD}" "${DB_MYSQL_DATABASE}" etherpad mysql
                ;;
            framadate)
                echo "save date"
                . $KAZ_KEY_DIR/env-framadateDB
                saveDB ${framadateDBName} "${DB_MYSQL_USER}" "${DB_MYSQL_PASSWORD}" "${DB_MYSQL_DATABASE}" framadate mysql
                ;;
            cloud)
                echo "save cloud"
                . $KAZ_KEY_DIR/env-nextcloudDB
                saveDB ${nextcloudDBName} "${DB_MYSQL_USER}" "${DB_MYSQL_PASSWORD}" "${DB_MYSQL_DATABASE}" nextcloud mysql
                ;;
            paheko)
                # nothing to do (plain files)
                ;;
            mattermost)
                echo "save mattermost"
                . $KAZ_KEY_DIR/env-mattermostDB
                saveDB matterPG "${DB_POSTGRES_USER}" "${DB_POSTGRES_PASSWORD}" "${DB_POSTGRES_DB}" mattermost postgres
                ;;
            mobilizon)
                echo "save mobilizon"
                . $KAZ_KEY_DIR/env-mobilizonDB
                saveDB ${mobilizonDBName} "${DB_POSTGRES_USER}" "${DB_POSTGRES_PASSWORD}" "${DB_POSTGRES_DB}" mobilizon postgres
                ;;
            peertube)
                echo "save peertube"
                . $KAZ_KEY_DIR/env-peertubeDB
                # NOTE(review): the 4th argument is DB_PEERTUBE_DB_HOSTNAME
                # while every other service passes a database name — confirm
                # (harmless for pg_dumpall, which ignores dbName, but odd)
                saveDB ${peertubeDBName} "${DB_POSTGRES_USER}" "${DB_POSTGRES_PASSWORD}" "${DB_PEERTUBE_DB_HOSTNAME}" peertube postgres
                ;;
            mastodon)
                echo "save mastodon"
                . $KAZ_KEY_DIR/env-mastodonDB
                saveDB ${mastodonDBName} "${DB_POSTGRES_USER}" "${DB_POSTGRES_PASSWORD}" "${DB_POSTGRES_DB}" mastodon postgres
                ;;
            roundcube)
                echo "save roundcube"
                . $KAZ_KEY_DIR/env-roundcubeDB
                saveDB ${roundcubeDBName} "${DB_MYSQL_USER}" "${DB_MYSQL_PASSWORD}" "${DB_MYSQL_DATABASE}" roundcube mysql
                ;;
            vaultwarden)
                echo "save vaultwarden"
                . $KAZ_KEY_DIR/env-vaultwardenDB
                saveDB ${vaultwardenDBName} "${DB_MYSQL_USER}" "${DB_MYSQL_PASSWORD}" "${DB_MYSQL_DATABASE}" vaultwarden mysql
                ;;
            dokuwiki)
                # nothing to do (plain files)
                ;;
            *-orga)
                # per-organisation compose: dump each service enabled in its docker-compose.yml
                ORGA=${compose%-orga}
                echo "save ${ORGA}"
                if grep -q "cloud:" "${KAZ_COMP_DIR}/${compose}/docker-compose.yml" 2> /dev/null ; then
                    echo " => cloud"
                    . $KAZ_KEY_DIR/orgas/$ORGA/env-nextcloudDB
                    saveDB "${ORGA}-DB" "${MYSQL_USER}" "${MYSQL_PASSWORD}" "${MYSQL_DATABASE}" "${ORGA}-cloud" mysql
                fi
                if grep -q "agora:" "${KAZ_COMP_DIR}/${compose}/docker-compose.yml" 2> /dev/null ; then
                    echo " => mattermost"
                    . $KAZ_KEY_DIR/orgas/$ORGA/env-mattermostDB
                    saveDB "${ORGA}-DB" "${MYSQL_USER}" "${MYSQL_PASSWORD}" "${MYSQL_DATABASE}" "${ORGA}-mattermost" mysql
                fi
                if grep -q "wordpress:" "${KAZ_COMP_DIR}/${compose}/docker-compose.yml" 2> /dev/null ; then
                    echo " => wordpress"
                    . $KAZ_KEY_DIR/orgas/$ORGA/env-wpDB
                    saveDB "${ORGA}-DB" "${MYSQL_USER}" "${MYSQL_PASSWORD}" "${MYSQL_DATABASE}" "${ORGA}-wordpress" mysql
                fi
                if grep -q "spip:" "${KAZ_COMP_DIR}/${compose}/docker-compose.yml" 2> /dev/null ; then
                    echo " => spip"
                    . $KAZ_KEY_DIR/orgas/$ORGA/env-spipDB
                    saveDB "${ORGA}-DB" "${MYSQL_USER}" "${MYSQL_PASSWORD}" "${MYSQL_DATABASE}" "${ORGA}-spip" mysql
                fi
                ;;
        esac
    done
}
########################################
# main: option parsing and dispatch
if [ "$#" -eq 0 ] ; then
    usage
fi
if [ "$1" == "-h" ] ; then
    usage
    # NOTE(review): usage exits 1, so this shift is never reached
    shift
fi
if [ "$1" == "-n" ] ; then
    # dry-run: prefix every side-effecting command with "echo"
    export SIMU=echo
    shift
fi
DCK_CMD=""
SAVE_CMD=""
case "$1" in
    start)
        DCK_CMD="startComposes"
        shift
        ;;
    stop)
        DCK_CMD="stopComposes"
        shift
        ;;
    save)
        SAVE_CMD="saveComposes"
        shift
        ;;
    status)
        DCK_CMD="statusComposes"
        shift
        ;;
    *)
        usage
        ;;
esac
# no compose argument left: operate on every available compose
if [ $# -eq 0 ] ; then
    enableComposesNoNeedMail=("${availableComposesNoNeedMail[@]}")
    enableMailComposes=("${availableMailComposes[@]}")
    enableComposesNeedMail=("${availableComposesNeedMail[@]}")
    enableProxyComposes=("${availableProxyComposes[@]}")
else
    # explicit list with "start": the proxies are always brought along
    if [ "${DCK_CMD}" = "startComposes" ] ; then
        enableProxyComposes=("${availableProxyComposes[@]}")
    fi
fi
# map each requested name onto the known composes (substring match allowed)
for compose in $*
do
    # tolerate a trailing "/" from shell completion
    compose=${compose%/}
    if [[ ! " ${knownedComposes[@]} " =~ " ${compose} " ]]; then
        declare -a subst
        subst=()
        for item in "${knownedComposes[@]}"; do
            [[ "${item}" =~ "${compose}" ]] && subst+=(${item})
        done
        if [ "${subst}" = "" ] ; then
            echo
            echo "Unknown compose: ${RED}${BOLD}${compose}${NC} not in ${YELLOW}${BOLD}${knownedComposes[*]}${NC}"
            echo
            exit 1
        else
            echo "substitute compose: ${YELLOW}${BOLD}${compose} => ${subst[@]}${NC}"
        fi
    fi
    for item in "${availableMailComposes[@]}"; do
        [[ "${item}" =~ "${compose}" ]] && enableMailComposes+=("${item}")
    done
    for item in "${availableProxyComposes[@]}"; do
        # NOTE(review): "=" (overwrite) while the three sibling loops use
        # "+=" — confirm whether keeping only the last matching proxy is intended
        [[ "${item}" =~ "${compose}" ]] && enableProxyComposes=("${item}")
    done
    for item in "${availableComposesNoNeedMail[@]}"; do
        [[ "${item}" =~ "${compose}" ]] && enableComposesNoNeedMail+=("${item}")
    done
    for item in "${availableComposesNeedMail[@]}"; do
        [[ "${item}" =~ "${compose}" ]] && enableComposesNeedMail+=("${item}")
    done
done
# run the selected action; exit 1 when nothing was dispatched
[[ ! -z "${DCK_CMD}" ]] && "${DCK_CMD}" && exit 0
[[ ! -z "${SAVE_CMD}" ]] && "${SAVE_CMD}" && exit 0
exit 1

82
bin2/createDBUsers.sh Executable file
View File

@@ -0,0 +1,82 @@
#!/bin/bash
# (Re)create the database accounts/privileges of every KAZ service and
# push fresh credentials into the services whose config lives on disk.
KAZ_ROOT=$(cd $(dirname $0)/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# for debugging
# SIMU=echo
# Possible improvements
# - take the impacted services as parameters (to limit the changes)
# - for the DBs, when a new login is declared the privileges are created
#   but the old ones are not revoked
. "${DOCKERS_ENV}"
# Grant (and thereby create) the MySQL account of one service.
#   $1 = env file suffix: $KAZ_KEY_DIR/env-$1 provides the MYSQL_* values
#   $2 = container name of the DB
createMysqlUser(){
    . $KAZ_KEY_DIR/env-$1
    # only works while root's stored password is still valid
    # chicken-and-egg problem (the old values would be needed):
    # * if rootPass changed, fix by hand
    # * if dbName changed, fix by hand
    checkDockerRunning "$2" "$2" || return
    echo "change DB pass on docker $2"
    echo "grant all privileges on ${MYSQL_DATABASE}.* to '${MYSQL_USER}' identified by '${MYSQL_PASSWORD}';" | \
        docker exec -i $2 bash -c "mysql --user=root --password=${MYSQL_ROOT_PASSWORD}"
}
# Refresh the Framadate admin htpasswd and DB credentials in its config.php.
# No-op when framadate is not enabled or not yet configured.
framadateUpdate(){
    [[ "${COMP_ENABLE}" =~ " framadate " ]] || return
    if [ ! -f "${DOCK_LIB}/volumes/framadate_dateConfig/_data/config.php" ]; then
        return 0
    fi
    . $KAZ_KEY_DIR/env-framadateDB
    . $KAZ_KEY_DIR/env-framadateServ
    # NOTE(review): only the htpasswd step is guarded by the container
    # check (&&); the sed below always runs — confirm this is intended
    checkDockerRunning "${framadateServName}" "Framadate" &&
        ${SIMU} docker exec -ti "${framadateServName}" bash -c -i "htpasswd -bc /var/framadate/admin/.htpasswd ${HTTPD_USER} ${HTTPD_PASSWORD}"
    ${SIMU} sed -i \
        -e "s/^#*const DB_USER[ ]*=.*$/const DB_USER= '${DB_MYSQL_USER}';/g" \
        -e "s/^#*const DB_PASSWORD[ ]*=.*$/const DB_PASSWORD= '${DB_MYSQL_PASSWORD}';/g" \
        "${DOCK_LIB}/volumes/framadate_dateConfig/_data/config.php"
}
# Push the (sha256-hashed) Jirafeau admin password into its local config.
# No-op when jirafeau is not enabled or not yet configured.
jirafeauUpdate(){
    [[ "${COMP_ENABLE}" =~ " jirafeau " ]] || return
    if [ ! -f "${DOCK_LIB}/volumes/jirafeau_fileConfig/_data/config.local.php" ]; then
        return 0
    fi
    . $KAZ_KEY_DIR/env-jirafeauServ
    # Jirafeau stores a sha256 digest of the admin password
    # NOTE(review): "_HTTPD_PASSWORD" (leading underscore) looks unusual —
    # confirm it matches the variable exported by env-jirafeauServ
    SHA=$(echo -n "${_HTTPD_PASSWORD}" | sha256sum | cut -d \ -f 1)
    ${SIMU} sed -i \
        -e "s/'admin_password'[ ]*=>[ ]*'[^']*'/'admin_password' => '${SHA}'/g" \
        "${DOCK_LIB}/volumes/jirafeau_fileConfig/_data/config.local.php"
}
####################
# main
####################
# Recreate the DB account of every stock service, then refresh the two
# services whose credentials are kept in plain config files.
createMysqlUser "etherpadDB" "${etherpadDBName}"
createMysqlUser "framadateDB" "${framadateDBName}"
createMysqlUser "giteaDB" "${gitDBName}"
createMysqlUser "mattermostDB" "${mattermostDBName}"
createMysqlUser "nextcloudDB" "${nextcloudDBName}"
createMysqlUser "roundcubeDB" "${roundcubeDBName}"
createMysqlUser "sympaDB" "${sympaDBName}"
createMysqlUser "vigiloDB" "${vigiloDBName}"
createMysqlUser "wpDB" "${wordpressDBName}"
createMysqlUser "vaultwardenDB" "${vaultwardenDBName}"
createMysqlUser "castopodDB" "${castopodDBName}"
createMysqlUser "spipDB" "${spipDBName}"
createMysqlUser "mastodonDB" "${mastodonDBName}"
framadateUpdate
jirafeauUpdate
exit 0

15
bin2/createSrcDocker.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# Build a dated source tarball of the KAZ tree (bin, config, dockers plus
# the sanitized secret.tmpl renamed to "secret") in /tmp.
cd $(dirname $0)
# normalize file ownership before packaging
./setOwner.sh
cd ../..
FILE_NAME="/tmp/$(date +'%Y%m%d')-KAZ.tar.bz2"
tar -cjf "${FILE_NAME}" --transform s/secret.tmpl/secret/ \
    ./kaz/secret.tmpl/ ./kaz/bin ./kaz/config ./kaz/dockers
ls -l "${FILE_NAME}"

5
bin2/createUser.py Executable file
View File

@@ -0,0 +1,5 @@
#!/usr/bin/python3
# CLI entry point: create KAZ users from the request file (see lib.user).
from lib.user import create_users_from_file
create_users_from_file()

794
bin2/createUser.sh Executable file
View File

@@ -0,0 +1,794 @@
#!/bin/bash
# kan: 30/03/2021
# koi: créer les users dans le système KAZ, le KazWorld, to become a kaznaute, a kaaaaaaaznaute!
# ki : fab
# test git du 02/10/2023 depuis snster
# !!! need by htpasswd
# apt-get install apache2-utils dos2unix
# rechercher tous les TODO du script pour le reste à faire
##########################################################
# fonctionnement :
# vérification de l'existence du fichier des demandes et création si absent
# on garnit les variables
# on vérifie les variables
# on créé un mdp utilisable par tous les services (identifiant : email kaz)
# pour chacun des services KAZ (NC / WP / DOKUWIKI)
# * on vérifie si le sous-domaine existe, on le créé sinon
# * on créé le user et le met admin si nécessaire
# * s'il existe déjà, rollback (y compris sur les autres services)
# pour paheko, on vérifie si le sous-domaine existe, on le créé sinon
# pour mattermost, on créé le user et l'équipe si nécé=essaire, sur l'agora de base
# tout est ok, on créé l'email
# on créé le mail d'inscription avec tout le détail des services créés (url/user)
# on inscrit le user dans la liste infos@${domain_sympa}
# on avertit contact@kaz.bzh et on post dans l'agora/creation_compte
# TODO : utilisez la req sql pour attaquer paheko et créer createUser.txt en auto et modifier le champ dans paheko ACTION de "à créer" à "aucune"
# on récupère toutes les variables et mdp
# on prend comme source des repertoire le dossier du dessus ( /kaz dans notre cas )
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
cd "${KAZ_ROOT}"
. "${DOCKERS_ENV}"
. $KAZ_KEY_DIR/env-ldapServ
. $KAZ_KEY_DIR/env-sympaServ
. $KAZ_KEY_DIR/env-paheko
# DOCK_DIR="${KAZ_COMP_DIR}" # ???
# on détermine le script appelant, le fichier log et le fichier source, tous issus de la même racine
PRG=$(basename $0)
RACINE=${PRG%.sh}
CREATE_ORGA_CMD="${KAZ_CONF_DIR}/orgaTmpl/orga-gen.sh"
mkdir -p "${KAZ_ROOT}/tmp" "${KAZ_ROOT}/log"
# fichier source dans lequel se trouve les infos sur les utilisateurs à créer
FILE="${KAZ_ROOT}/tmp/${RACINE}.txt"
# fichier de log pour
LOG="${KAZ_ROOT}/log/${RACINE}.log"
# TODO : risque si 2 admins lance en même temps
CMD_LOGIN="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-1-LOGIN.sh"
CMD_SYMPA="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-2-SYMPA.sh"
CMD_ORGA="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-3-ORGA.sh"
CMD_PROXY="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-4-PROXY.sh"
CMD_FIRST="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-5-FIRST.sh"
CMD_INIT="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-6-INIT.sh"
CMD_PAHEKO="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-7-PAHEKO.sh"
CMD_MSG="${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-8-MSG.sh"
TEMP_PAHEKO="${KAZ_ROOT}/tmp/${RACINE}.TEMP_PHAEKO.cvs"
URL_SITE="${domain}"
URL_WEBMAIL="${webmailHost}.${domain}"
URL_LISTE="${sympaHost}.${domain}"
URL_AGORA="${matterHost}.${domain}"
URL_MDP="${ldapUIHost}.${domain}"
# URL_PAHEKO="kaz-${pahekoHost}.${domain}"
URL_PAHEKO="${httpProto}://${API_USER}:${API_PASSWORD}@kaz-paheko.${domain}"
availableProxyComposes=($(getList "${KAZ_CONF_DIR}/container-proxy.list"))
NL_LIST=infos@${domain_sympa}
# indiqué dans le mail d'inscription
# (mail+cloud base+agora : max=3, min=2)
NB_SERVICES_BASE=0
# max : 5, min : 0
NB_SERVICES_DEDIES=0
# note qu'on rajoute dans le mail pour les orgas
MESSAGE_MAIL_ORGA_1=""
MESSAGE_MAIL_ORGA_2=""
MESSAGE_MAIL_ORGA_3=""
############################
# Traitement des arguments #
############################
CREATE_ORGA="true"
SIMULATION=YES
usage () {
echo "${PRG} [-h] [-s] [-e] [-v] [-u]"
echo " version 1.0"
echo " Create users in kaz world using ${FILE} as source file. All logs in ${LOG}"
echo " -h Display this help."
echo " -s Simulate. none user created but you can see the result in ${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-*.sh"
echo " -e Execute commands. user or orga will be created !!!"
echo " -u create only user (don't create orga)"
echo " -v or -V display site informations"
}
# Parse the command-line options (see usage).  Simulation is the default;
# only -e actually executes the generated commands.
for ARG in $*; do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            shift
            exit;;
        -s)
            # simulation is already the default (SIMULATION=YES)
            shift;;
        -e)
            SIMULATION=NO
            shift;;
        -u)
            # only user => no orga
            CREATE_ORGA=
            shift;;
        '-v' | '-V' )
            echo "${PRG}, root : ${KAZ_ROOT}, on domain : ${URL_SITE}"
            exit
            ;;
        *)
            usage
            echo "${PRG}: ${RED}unknown parameter${NC}"
            shift
            exit;;
    esac
done
##################################
# Inventaire des comptes à créer #
##################################
# la recherche des comptes à créé avec la commande :
# bin/interoPaheko.sh
# création d'un fichier vide
# TODO : même code ici et dans interoPaheko.sh => risque de divergence
if [ ! -s "${FILE}" ];then
echo "${RED}"
echo "ERREUR : le fichier ${FILE} n'existait pas"
echo "Il vient d'être créé. Vous pouvez le compléter."
echo "${NC}"
cat > "${FILE}" <<EOF
# -- fichier de création des comptes KAZ
# --
# -- 1 ligne par compte
# -- champs séparés par ";". les espaces en début et en fin sont enlevés
# -- laisser vide si pas de donnée
# -- pas d'espace dans les variables
# --
# -- ORGA : nom de l'organisation (max 23 car), vide sinon
# -- ADMIN_ORGA : O/N indique si le user est admin de l'orga (va le créer comme admin du NC de l'orga et admin de l'équipe agora)
# -- NC_ORGA : O/N indique si l'orga a demandé un NC
# -- PAHEKO_ORGA : O/N indique si l'orga a demandé un paheko
# -- WP_ORGA : O/N indique si l'orga a demandé un wp
# -- AGORA_ORGA : O/N indique si l'orga a demandé un mattermost
# -- WIKI_ORGA : O/N indique si l'orga a demandé un wiki
# -- NC_BASE : O/N indique si le user doit être inscrit dans le NC de base
# -- GROUPE_NC_BASE : soit null soit le groupe dans le NC de base
# -- EQUIPE_AGORA : soit null soit equipe agora (max 23 car)
# -- QUOTA = (1/10/20/...) en GB
# --
# NOM ; PRENOM ; EMAIL_SOUHAITE ; EMAIL_SECOURS ; ORGA ; ADMIN_ORGA ; NC_ORGA ; PAHEKO_ORGA ; WP_ORGA ; AGORA_ORGA ; WIKI_ORGA ; NC_BASE ; GROUPE_NC_BASE ; EQUIPE_AGORA ; QUOTA
# exemple pour un compte découverte :
# loufoque ; le_mec; loufoque.le-mec@kaz.bzh ; gregomondo@kaz.bzh; ; N; N; N; N; N; N;N;;; 1
# exemple pour un compte asso de l'orga gogol avec le service dédié NC uniquement + une équipe dans l'agora
# loufoque ; le_mec; loufoque.le-mec@kaz.bzh ; gregomondo@kaz.bzh; gogol ; O; O; N; N; N; N;N;;gogol_team; 10
EOF
exit
fi
# Strip comment and blank lines; abort when nothing remains to process.
ALL_LINES=$(sed -e "/^[ \t]*#.*$/d" -e "/^[ \t]*$/d" "${FILE}")
if [ -z "${ALL_LINES}" ];then
    usage
    echo "${PRG}: ${RED}nothing to do in ${FILE}${NC}"
    exit
fi
###################
# Initialisations #
###################
# existing KAZ e-mails and aliases
TFILE_EMAIL="$(mktemp /tmp/${RACINE}.XXXXXXXXX.TFILE_EMAIL)"
# mattermost accounts
TFILE_MM="$(mktemp /tmp/${RACINE}.XXXXXXXXX.TFILE_MM)"
# NextCloud ident
TEMP_USER_NC="$(mktemp /tmp/${RACINE}.XXXXXXXXX.TEMP_USER_NC)"
# NextCloud group
TEMP_GROUP_NC="$(mktemp /tmp/${RACINE}.XXXXXXXXX.TEMP_GROUP_NC)"
# WP ident
TEMP_USER_WP="$(mktemp /tmp/${RACINE}.XXXXXXXXX.TEMP_USER_WP)"
# remove the temp files on exit/interrupt
trap "rm -f '${TFILE_EMAIL}' '${TFILE_MM}' '${TEMP_USER_NC}' '${TEMP_GROUP_NC}' '${TEMP_USER_WP}'" 0 1 2 3 15
# every generated command file starts as an empty executable bash script
for i in "${CMD_LOGIN}" "${CMD_SYMPA}" "${CMD_ORGA}" "${CMD_PROXY}" "${CMD_FIRST}" "${CMD_INIT}" "${CMD_PAHEKO}" "${CMD_MSG}"; do
    echo "#!/bin/bash" > "${i}" && chmod +x "${i}"
done
# CSV pushed back into Paheko through its import API
echo "numero,nom,quota_disque,action_auto" > "${TEMP_PAHEKO}"
echo "curl \"https://${API_USER}:${API_PASSWORD}@kaz-paheko.kaz.bzh/api/user/import\" -T \"${TEMP_PAHEKO}\"" >> "${CMD_PAHEKO}"
# collect every e-mail (backup/alias/kaz) from a fresh LDAP dump
echo "on récupère tous les emails (secours/alias/kaz) sur le ldap"
FILE_LDIF=/home/sauve/ldap.ldif
/kaz/bin/ldap/ldap_sauve.sh
gunzip ${FILE_LDIF}.gz -f
grep -aEiorh '([[:alnum:]]+([._-][[:alnum:]]+)*@[[:alnum:]]+([._-][[:alnum:]]+)*\.[[:alpha:]]{2,6})' ${FILE_LDIF} | sort -u > ${TFILE_EMAIL}
# collect the existing mattermost logins
echo "récupération des login mattermost... "
docker exec -i mattermostServ bin/mmctl user list --all | grep ":.*(" | cut -d ':' -f 2 | cut -d ' ' -f 2 | sort > "${TFILE_MM}"
dos2unix "${TFILE_MM}"
echo "done"
# log into the agora so that subsequent mmctl commands can run
. $KAZ_KEY_DIR/env-mattermostAdmin
echo "docker exec -i mattermostServ bin/mmctl auth login ${httpProto}://${URL_AGORA} --name local-server --username ${mattermost_user} --password ${mattermost_pass}" | tee -a "${CMD_INIT}"
# e-mail validation regex used by validator below
regex="^(([A-Za-z0-9]+((\.|\-|\_|\+)?[A-Za-z0-9]?)*[A-Za-z0-9]+)|[A-Za-z0-9]+)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$"
# Abort the whole run (exit 1) when $1 does not look like an e-mail
# address according to the global ${regex}; the error message is printed
# and appended to ${LOG}.
function validator {
    [[ "$1" =~ ${regex} ]] && return
    # printf "* %-48s \e[1;31m[fail]\e[m\n" "${1}"
    (
        echo
        echo "ERREUR : le paramètre ${RED}${BOLD}$1 n'est pas un email valide${NC} - on stoppe tout - aucun utilisateur de créé"
        echo
    ) | tee -a "${LOG}"
    exit 1
}
######################################
# Boucle lecture des comptes à créer #
######################################
echo -e "$(date '+%Y-%m-%d %H:%M:%S') : ${PRG} - sauvegarde des utilisateurs à créer" | tee "${LOG}"
cat "${FILE}" >> "${LOG}"
LDAP_IP=$(docker inspect -f '{{.NetworkSettings.Networks.ldapNet.IPAddress}}' ldapServ)
ALL_ORGA=
while read ligne; do
# | xargs permet de faire un trim
NOM=$(awk -F ";" '{print $1}' <<< "${ligne}" | xargs)
PRENOM=$(awk -F ";" '{print $2}' <<< "${ligne}" | xargs)
declare -A tab_email
tab_email[EMAIL_SOUHAITE]=$(awk -F ";" '{print $3}' <<< "${ligne}" | xargs)
tab_email[EMAIL_SECOURS]=$(awk -F ";" '{print $4}' <<< "${ligne}" | xargs)
ORGA=$(awk -F ";" '{print $5}' <<< "${ligne}" | xargs)
ORGA=${ORGA,,}
declare -A service
service[ADMIN_ORGA]=$(awk -F ";" '{print $6}' <<< "${ligne}" | xargs)
service[NC_ORGA]=$(awk -F ";" '{print $7}' <<< "${ligne}" | xargs)
service[PAHEKO_ORGA]=$(awk -F ";" '{print $8}' <<< "${ligne}" | xargs)
service[WP_ORGA]=$(awk -F ";" '{print $9}' <<< "${ligne}" | xargs)
service[AGORA_ORGA]=$(awk -F ";" '{print $10}' <<< "${ligne}" | xargs)
service[WIKI_ORGA]=$(awk -F ";" '{print $11}' <<< "${ligne}" | xargs)
service[NC_BASE]=$(awk -F ";" '{print $12}' <<< "${ligne}" | xargs)
GROUPE_NC_BASE=$(awk -F ";" '{print $13}' <<< "${ligne}" | xargs)
GROUPE_NC_BASE="${GROUPE_NC_BASE,,}"
EQUIPE_AGORA=$(awk -F ";" '{print $14}' <<< "${ligne}" | xargs)
EQUIPE_AGORA=${EQUIPE_AGORA,,}
QUOTA=$(awk -F ";" '{print $15}' <<< "${ligne}" | xargs)
PASSWORD=$(awk -F ";" '{print $16}' <<< "${ligne}" | xargs)
IDENT_KAZ=$(unaccent utf8 "${PRENOM,,}.${NOM,,}")
#email en minuscule
EMAIL_SOUHAITE=${tab_email[EMAIL_SOUHAITE],,}
EMAIL_SECOURS=${tab_email[EMAIL_SECOURS]}
echo -e "${NL}***************************** traitement de ${ligne}" | tee -a "${LOG}"
###########################
# Vérification des champs #
###########################
for k in "${!tab_email[@]}"; do
validator "${tab_email[${k}]}"
done
# vérif des champs O/N
for k in "${!service[@]}"; do
if [ "${service[${k}]}" != "O" -a "${service[${k}]}" != "N" ]; then
(
echo "${RED}"
echo "${k} : ${service[${k}]}"
echo "ERREUR : le paramètre ${k} accepte O ou N - on stoppe tout - aucun utilisateur de créé"
echo "${NC}"
) | tee -a "${LOG}"
exit 1
fi
done
# taille ORGA et EQUIPE_AGORA
TAILLE_MAX="23"
if [ "${#ORGA}" -gt "${TAILLE_MAX}" ]; then
(
echo "${RED}"
echo "ERREUR : le paramètre ORGA est trop grand : ${ORGA} , taille max : ${TAILLE_MAX} - on stoppe tout - aucun utilisateur de créé"
echo "${NC}"
) | tee -a "${LOG}"
exit 1
fi
if [ "${#ORGA}" -gt "0" ]; then
if [[ "${ORGA}" =~ ^[[:alnum:]-]+$ ]]; then
echo "ok"
else
(
echo "${RED}"
echo "ERREUR : le paramètre ORGA ne contient pas les caractères autorisés : ${ORGA} - on stoppe tout - aucun utilisateur de créé"
echo "${NC}"
) | tee -a "${LOG}"
exit 1
fi
fi
if [ "${#EQUIPE_AGORA}" -gt "${TAILLE_MAX}" ]; then
(
echo "${RED}"
echo "ERREUR : le paramètre EQUIPE_AGORA est trop grand : ${EQUIPE_AGORA} , taille max : ${TAILLE_MAX} - on stoppe tout - aucun utilisateur de créé"
echo "${NC}"
) | tee -a "${LOG}"
exit 1
fi
# vérif quota est entier
if ! [[ "${QUOTA}" =~ ^[[:digit:]]+$ ]]; then
(
echo
echo "ERREUR : ${RED}${BOLD}QUOTA n'est pas numérique : ${QUOTA}${NC} - on stoppe tout - aucun utilisateur de créé"
) | tee -a "${LOG}"
fi
####################################################
# cree un mdp acceptable par postfix/nc/mattermost #
####################################################
if [ -z ${PASSWORD} ]; then
PASSWORD=_`apg -n 1 -m 10 -M NCL -d`_
fi
SEND_MSG_CREATE=
if [ -n "${ORGA}" -a -z "${CREATE_ORGA}" ]; then
# skeep orga
continue
fi
####################################################################
# TODO: check LDAP identifiantKaz uniqueness; if it fails, STOP    #
####################################################################
###################################
# Create the mail (messaging) account
###################################
# Does the address already exist? ${TFILE_EMAIL} holds the known mailboxes.
if grep -q "^${EMAIL_SOUHAITE}$" "${TFILE_EMAIL}"; then
echo "${EMAIL_SOUHAITE} existe déjà" | tee -a "${LOG}"
else
SEND_MSG_CREATE=true
echo "${EMAIL_SOUHAITE} n'existe pas" | tee -a "${LOG}"
# LDAP, to be tested
# NOTE(review): this overwrites the global ${domain} (used below to build
# service URLs) with the mail-address domain — confirm this is intended.
user=$(echo ${EMAIL_SOUHAITE} | awk -F '@' '{print $1}')
domain=$(echo ${EMAIL_SOUHAITE} | awk -F '@' '{print $2}')
pass=$(mkpasswd -m sha512crypt ${PASSWORD})
# Emit (into ${CMD_LOGIN}) the ldapmodify command that creates the LDAP
# entry: mailbox, quota, Nextcloud/agora/mobilizon flags and crypted password.
echo "echo -e '\n\ndn: cn=${EMAIL_SOUHAITE},ou=users,${ldap_root}\n\
changeType: add\n\
objectclass: inetOrgPerson\n\
objectClass: PostfixBookMailAccount\n\
objectClass: nextcloudAccount\n\
objectClass: kaznaute\n\
sn: ${PRENOM} ${NOM}\n\
mail: ${EMAIL_SOUHAITE}\n\
mailEnabled: TRUE\n\
mailGidNumber: 5000\n\
mailHomeDirectory: /var/mail/${domain}/${user}/\n\
mailQuota: ${QUOTA}G\n\
mailStorageDirectory: maildir:/var/mail/${domain}/${user}/\n\
mailUidNumber: 5000\n\
mailDeSecours: ${EMAIL_SECOURS}\n\
identifiantKaz: ${IDENT_KAZ}\n\
quota: ${QUOTA}\n\
nextcloudEnabled: TRUE\n\
nextcloudQuota: ${QUOTA} GB\n\
mobilizonEnabled: TRUE\n\
agoraEnabled: TRUE\n\
userPassword: {CRYPT}${pass}\n\n' | ldapmodify -c -H ldap://${LDAP_IP} -D \"cn=${LDAP_ADMIN_USERNAME},${ldap_root}\" -x -w ${LDAP_ADMIN_PASSWORD}" | tee -a "${CMD_LOGIN}"
fi
#userPassword: {CRYPT}\$6\$${pass}\n\n\" | ldapmodify -c -H ldap://${LDAP_IP} -D \"cn=${LDAP_CONFIG_ADMIN_USERNAME},${ldap_root}\" -x -w ${LDAP_CONFIG_ADMIN_PASSWORD}" | tee -a "${CMD_LOGIN}"
CREATE_ORGA_SERVICES=""
#############
# NEXTCLOUD #
#############
# Recompute the Nextcloud URL: dedicated cloud for the orga, or the shared one.
if [ "${ORGA}" != "" -a "${service[NC_ORGA]}" == "O" ]; then
URL_NC="${ORGA}-${cloudHost}.${domain}"
# If the orga's cloud container is not up, schedule its creation.
nb=$(docker ps | grep "${ORGA}-${cloudHost}" | wc -l)
if [ "${nb}" == "0" ];then
echo " * +cloud +collabora ${ORGA}"
CREATE_ORGA_SERVICES="${CREATE_ORGA_SERVICES} +cloud +collabora"
# initial plugins are installed by "/kaz/bin/gestClouds.sh"
fi
NB_SERVICES_DEDIES=$((NB_SERVICES_DEDIES+1))
else
URL_NC="${cloudHost}.${domain}"
NB_SERVICES_BASE=$((NB_SERVICES_BASE+1))
fi
MESSAGE_MAIL_ORGA_1="${MESSAGE_MAIL_ORGA_1}${NL}* un bureau virtuel pour stocker des fichiers/calendriers/contacts et partager avec vos connaissances : ${httpProto}://${URL_NC}"
# Does the user already exist on this Nextcloud instance?
. $KAZ_KEY_DIR/env-nextcloudServ
curl -o "${TEMP_USER_NC}" -X GET -H 'OCS-APIRequest:true' "${httpProto}://${NEXTCLOUD_ADMIN_USER}:${NEXTCLOUD_ADMIN_PASSWORD}@${URL_NC}/ocs/v1.php/cloud/users?search=${IDENT_KAZ}"
if grep -q "<element>${IDENT_KAZ}</element>" "${TEMP_USER_NC}"; then
echo "${IDENT_KAZ} existe déjà sur ${URL_NC}" | tee -a "${LOG}"
else
# Create the user on Nextcloud — except on the shared instance, where the
# account comes from LDAP and is never created here.
# NOTE(review): ${URL_NC} is unquoted in this test — fine as long as it
# never contains whitespace; confirm.
if [ ${URL_NC} != "${cloudHost}.${domain}" ]; then
. $KAZ_KEY_DIR/orgas/$ORGA/env-nextcloudServ
echo "curl -X POST -H 'OCS-APIRequest:true' ${httpProto}://${NEXTCLOUD_ADMIN_USER}:${NEXTCLOUD_ADMIN_PASSWORD}@${URL_NC}/ocs/v1.php/cloud/users \
-d userid='${IDENT_KAZ}' \
-d displayName='${PRENOM} ${NOM}' \
-d password='${PASSWORD}' \
-d email='${EMAIL_SOUHAITE}' \
-d quota='${QUOTA}GB' \
-d language='fr' \
" | tee -a "${CMD_INIT}"
fi
# If the user administers the orga, grant Nextcloud admin on the orga cloud.
if [ "${service[ADMIN_ORGA]}" == "O" -a "${ORGA}" != "" -a "${service[NC_ORGA]}" == "O" ]; then
. $KAZ_KEY_DIR/orgas/$ORGA/env-nextcloudServ
echo "curl -X POST -H 'OCS-APIRequest:true' ${httpProto}://${NEXTCLOUD_ADMIN_USER}:${NEXTCLOUD_ADMIN_PASSWORD}@${URL_NC}/ocs/v1.php/cloud/users/${IDENT_KAZ}/groups -d groupid='admin'" | tee -a "${CMD_INIT}"
fi
# Should the user be added to a specific group on the SHARED Nextcloud?
if [ "${GROUPE_NC_BASE}" != "" -a "${service[NC_BASE]}" == "O" ]; then
# Back on the shared instance: re-source its admin credentials.
. $KAZ_KEY_DIR/env-nextcloudServ
# Does the group already exist?
curl -o "${TEMP_GROUP_NC}" -X GET -H 'OCS-APIRequest:true' "${httpProto}://${NEXTCLOUD_ADMIN_USER}:${NEXTCLOUD_ADMIN_PASSWORD}@${URL_NC}/ocs/v1.php/cloud/groups?search=${GROUPE_NC_BASE}"
nb=$(grep "<element>${GROUPE_NC_BASE}</element>" "${TEMP_GROUP_NC}" | wc -l)
if [ "${nb}" == "0" ];then
echo "curl -X POST -H 'OCS-APIRequest:true' ${httpProto}://${NEXTCLOUD_ADMIN_USER}:${NEXTCLOUD_ADMIN_PASSWORD}@${URL_NC}/ocs/v1.php/cloud/groups -d groupid=${GROUPE_NC_BASE}" | tee -a "${CMD_INIT}"
fi
# Then attach the user to the group.
echo "curl -X POST -H 'OCS-APIRequest:true' ${httpProto}://${NEXTCLOUD_ADMIN_USER}:${NEXTCLOUD_ADMIN_PASSWORD}@${URL_NC}/ocs/v1.php/cloud/users/${IDENT_KAZ}/groups -d groupid=${GROUPE_NC_BASE}" | tee -a "${CMD_INIT}"
fi
fi
#############
# WORDPRESS #
#############
# TODO: for the API see https://www.hostinger.com/tutorials/wordpress-rest-api
# Schedule a dedicated WordPress for the orga when requested and not yet up.
if [ "${ORGA}" != "" -a "${service[WP_ORGA]}" == "O" ]; then
URL_WP_ORGA="${ORGA}-${wordpressHost}.${domain}"
# If the orga's wp container is not up, schedule its creation.
nb=$(docker ps | grep "${ORGA}-${wordpressHost}" | wc -l)
if [ "${nb}" == "0" ];then
echo " * +wp ${ORGA}"
CREATE_ORGA_SERVICES="${CREATE_ORGA_SERVICES} +wp"
fi
NB_SERVICES_DEDIES=$((NB_SERVICES_DEDIES+1))
MESSAGE_MAIL_ORGA_1="${MESSAGE_MAIL_ORGA_1}${NL}* un site web de type wordpress : ${httpProto}://${URL_WP_ORGA}/wp-admin/"
# TODO: check user existence (dead draft kept below for reference)
# # le user existe t-il déjà sur le wp ?
# . $KAZ_KEY_DIR/env-wpServ
# curl -o "${TEMP_USER_WP}" -X GET "${httpProto}://${WORDPRESS_ADMIN_USER}:${WORDPRESS_ADMIN_PASSWORD}@${URL_WP_ORGA}/ocs/v1.php/cloud/users?search=${IDENT_KAZ}"
# nb_user_wp_orga=$(grep "<element>${IDENT_KAZ}</element>" "${TEMP_USER_WP}" | wc -l)
# if [ "${nb_user_wp_orga}" != "0" ];then
# (
# echo "${RED}"
# echo "ERREUR : ${IDENT_KAZ} existe déjà sur ${URL_WP_ORGA} - on stoppe tout - aucun utilisateur de créé"
# echo "${NC}"
# ) | tee -a "${LOG}"
#
# # ROLLBACK - on vire le user de NC
# if [ "${nb_user_nc_orga}" != "0" ];then
# (
# echo "${RED}"
# echo "ERREUR : ${IDENT_KAZ} existe déjà sur ${URL_NC} - on stoppe tout - aucun utilisateur de créé"
# echo "${NC}"
# ) | tee -a "${LOG}"
#
# # on supprime l'utilisateur sur NC.
# echo "curl -X DELETE -H 'OCS-APIRequest:true' ${httpProto}://admin:${NEXTCLOUD_ADMIN_PASSWORD}@${URL_NC}/ocs/v1.php/cloud/users \
# -d userid='${IDENT_KAZ}' \
# " | tee -a "${CMD_INIT}"
# fi
#
# exit 1
# fi
# TODO: create the user and grant admin when required
# if [ "${service[ADMIN_ORGA]}" == "O" ]; then
# :
# else
# :
# fi
fi
############
# PAHEKO   #
############
# Paheko has no per-orga container check: it is always (re)scheduled.
if [ "${ORGA}" != "" -a "${service[PAHEKO_ORGA]}" == "O" ]; then
URL_PAHEKO_ORGA="${ORGA}-${pahekoHost}.${domain}"
# there is no dedicated paheko docker (paheko is always created)
echo " * +paheko ${ORGA}"
CREATE_ORGA_SERVICES="${CREATE_ORGA_SERVICES} +paheko"
NB_SERVICES_DEDIES=$((NB_SERVICES_DEDIES+1))
MESSAGE_MAIL_ORGA_1="${MESSAGE_MAIL_ORGA_1}${NL}* un service de gestion adhérents/clients : ${httpProto}://${URL_PAHEKO_ORGA}"
if [ "${service[ADMIN_ORGA]}" == "O" ]; then
MESSAGE_MAIL_ORGA_1="${MESSAGE_MAIL_ORGA_1} (l'installation est à terminer en vous rendant sur le site)"
fi
fi
############
# DOKUWIKI #
############
# Dedicated wiki for the orga: schedule creation and register the user
# directly in dokuwiki's users.auth.php (bcrypt via htpasswd).
if [ "${ORGA}" != "" -a "${service[WIKI_ORGA]}" == "O" ]; then
URL_WIKI_ORGA="${ORGA}-${dokuwikiHost}.${domain}"
# If the orga's wiki container is not up, schedule its creation.
nb=$(docker ps | grep "${ORGA}-${dokuwikiHost}" | wc -l)
if [ "${nb}" == "0" ];then
echo " * +wiki ${ORGA}"
CREATE_ORGA_SERVICES="${CREATE_ORGA_SERVICES} +wiki"
fi
NB_SERVICES_DEDIES=$((NB_SERVICES_DEDIES+1))
MESSAGE_MAIL_ORGA_1="${MESSAGE_MAIL_ORGA_1}${NL}* un wiki dédié pour votre documentation : ${httpProto}://${URL_WIKI_ORGA}"
# TODO: see https://www.dokuwiki.org/devel:xmlrpc:clients
if grep -q "^${IDENT_KAZ}:" "${DOCK_VOL}/orga_${ORGA}-wikiConf/_data/users.auth.php" 2>/dev/null; then
echo "${IDENT_KAZ} existe déjà sur ${URL_WIKI_ORGA}" | tee -a "${LOG}"
else
echo "echo \"${IDENT_KAZ}:$(htpasswd -bnBC 10 "" ${PASSWORD}):${PRENOM} ${NOM}:${EMAIL_SOUHAITE}:admin,user\" >> \"${DOCK_VOL}/orga_${ORGA}-wikiConf/_data/users.auth.php\"" | tee -a "${CMD_INIT}"
fi
fi
##############
# MATTERMOST #
##############
# Dedicated mattermost containers are NOT managed here.
if [ "${ORGA}" != "" -a "${service[AGORA_ORGA]}" == "O" ]; then
echo "# ******************************************************************************" | tee -a "${CMD_INIT}"
echo "# **************************** ATTENTION ***************************************" | tee -a "${CMD_INIT}"
echo "# ******************************************************************************" | tee -a "${CMD_INIT}"
echo "# Mattermost dédié : on ne fait rien." | tee -a "${CMD_INIT}"
echo "# ******************************************************************************" | tee -a "${CMD_INIT}"
fi
# ${TFILE_MM} lists the existing mattermost usernames.
if grep -q "^${IDENT_KAZ}$" "${TFILE_MM}" 2>/dev/null; then
echo "${IDENT_KAZ} existe déjà sur mattermost" | tee -a "${LOG}"
else
# Create the mattermost account...
echo "docker exec -i mattermostServ bin/mmctl user create --email ${EMAIL_SOUHAITE} --username ${IDENT_KAZ} --password ${PASSWORD}" | tee -a "${CMD_LOGIN}"
# ...and always add the user to the KAZ team and its 2 public channels.
echo "docker exec -i mattermostServ bin/mmctl team users add kaz ${EMAIL_SOUHAITE}" | tee -a "${CMD_LOGIN}"
echo "docker exec -i mattermostServ bin/mmctl channel users add kaz:une-question--un-soucis ${EMAIL_SOUHAITE}" | tee -a "${CMD_LOGIN}"
echo "docker exec -i mattermostServ bin/mmctl channel users add kaz:cafe-du-commerce--ouvert-2424h ${EMAIL_SOUHAITE}" | tee -a "${CMD_LOGIN}"
NB_SERVICES_BASE=$((NB_SERVICES_BASE+1))
fi
# Attach the user to a dedicated Mattermost team, creating it first if it
# does not exist yet (the default "kaz" team is handled above).
if [ "${EQUIPE_AGORA}" != "" -a "${EQUIPE_AGORA}" != "kaz" ]; then
# does the team already exist?
nb=$(docker exec mattermostServ bin/mmctl team list | grep -w "${EQUIPE_AGORA}" | wc -l)
if [ "${nb}" == "0" ];then # no: create it (private), with this user as team admin
# fix: "--private" was outside the quoted command string; by accident echo
# still emitted the same text, but it is now part of the string itself
echo "docker exec -i mattermostServ bin/mmctl team create --name ${EQUIPE_AGORA} --display_name ${EQUIPE_AGORA} --email ${EMAIL_SOUHAITE} --private" | tee -a "${CMD_INIT}"
fi
# then add the user to the team
echo "docker exec -i mattermostServ bin/mmctl team users add ${EQUIPE_AGORA} ${EMAIL_SOUHAITE}" | tee -a "${CMD_INIT}"
fi
# When at least one orga service must be created, queue the creation and
# initialisation commands and remember the orga for the proxy refresh.
if [ -n "${CREATE_ORGA_SERVICES}" ]; then
SEND_MSG_CREATE=true
echo "${CREATE_ORGA_CMD}" --create ${CREATE_ORGA_SERVICES} "${ORGA}" | tee -a "${CMD_ORGA}"
echo "${CREATE_ORGA_CMD}" --init ${CREATE_ORGA_SERVICES} "${ORGA}" | tee -a "${CMD_FIRST}"
ALL_ORGA="${ALL_ORGA} ${ORGA}"
fi
##########################
# Newsletter subscription
##########################
# TODO: use the list on dev as well
# Subscribe the user on sympa, to the infos@${domain_sympa} list.
# docker exec -i sympaServ /usr/lib/sympa/bin/sympa_soap_client.pl --soap_url=https://listes.kaz.sns/sympasoap --trusted_application=SOAP_USER --trusted_application_password=SOAP_PASSWORD --proxy_vars="USER_EMAIL=contact1@kaz.sns" --service=which
if [[ "${mode}" = "dev" ]]; then
echo "# DEV, on teste l'inscription à sympa"| tee -a "${CMD_SYMPA}"
LISTMASTER=$(echo ${LISTMASTERS} | cut -d',' -f1)
echo "docker exec -i sympaServ /usr/lib/sympa/bin/sympa_soap_client.pl --soap_url=${httpProto}://${URL_LISTE}/sympasoap --trusted_application=${SOAP_USER} --trusted_application_password=${SOAP_PASSWORD} --proxy_vars=\"USER_EMAIL=${LISTMASTER}\" --service=add --service_parameters=\"${NL_LIST},${EMAIL_SOUHAITE}\"" | tee -a "${CMD_SYMPA}"
else
echo "# PROD, on inscrit à sympa"| tee -a "${CMD_SYMPA}"
LISTMASTER=$(echo ${LISTMASTERS} | cut -d',' -f1)
# In prod, both the main address and the backup address are subscribed.
echo "docker exec -i sympaServ /usr/lib/sympa/bin/sympa_soap_client.pl --soap_url=${httpProto}://${URL_LISTE}/sympasoap --trusted_application=${SOAP_USER} --trusted_application_password=${SOAP_PASSWORD} --proxy_vars=\"USER_EMAIL=${LISTMASTER}\" --service=add --service_parameters=\"${NL_LIST},${EMAIL_SOUHAITE}\"" | tee -a "${CMD_SYMPA}"
echo "docker exec -i sympaServ /usr/lib/sympa/bin/sympa_soap_client.pl --soap_url=${httpProto}://${URL_LISTE}/sympasoap --trusted_application=${SOAP_USER} --trusted_application_password=${SOAP_PASSWORD} --proxy_vars=\"USER_EMAIL=${LISTMASTER}\" --service=add --service_parameters=\"${NL_LIST},${EMAIL_SECOURS}\"" | tee -a "${CMD_SYMPA}"
fi
if [ "${service[ADMIN_ORGA]}" == "O" ]; then
MESSAGE_MAIL_ORGA_2="${MESSAGE_MAIL_ORGA_2}Comme administrateur de votre organisation, vous pouvez créer des listes de diffusion en vous rendant sur ${httpProto}://${URL_LISTE}"
fi
###################
# update paheko   #
###################
# TODO: problem if 2 accounts share the same requested email (should not happen)
# NOTE(review): the paheko URL is hard-coded to the production host
# (kaz-paheko.kaz.bzh) even in dev mode — confirm intended.
curl -s "https://${API_USER}:${API_PASSWORD}@kaz-paheko.kaz.bzh/api/sql" -d "SELECT numero,nom,quota_disque from users WHERE email='${EMAIL_SOUHAITE}'" | jq '.results[] | .numero,.nom,.quota_disque ' | tr \\n ',' | sed 's/,$/,Aucune\n/' >> "${TEMP_PAHEKO}"
####################
# Welcome-mail prep
####################
if [ "${NB_SERVICES_DEDIES}" != "0" ];then
MESSAGE_MAIL_ORGA_1="${NL} dont ${NB_SERVICES_DEDIES} service(s) dédié(s) pour votre organisation:${NL} ${MESSAGE_MAIL_ORGA_1}"
fi
if [ -z "${SEND_MSG_CREATE}" ]; then
# nothing was created => no message, next record
continue
fi
# If the user is an orga admin, add a paragraph asking them to reply with
# the list of services they actually need.
if [ "${service[ADMIN_ORGA]}" == "O" ]; then
MESSAGE_MAIL_ORGA_3="${MESSAGE_MAIL_ORGA_3}En tant qu'association/famille/société. Vous avez la possibilité d'ouvrir, quand vous le voulez, des services kaz, il vous suffit de nous le demander.
Pourquoi n'ouvrons-nous pas tous les services tout de suite ? parce que nous aimons la sobriété et que nous préservons notre espace disque ;)
A quoi sert d'avoir un site web si on ne l'utilise pas, n'est-ce pas ?
Par retour de mail, dites-nous de quoi vous avez besoin tout de suite entre:
* une comptabilité : un service de gestion adhérents/clients
* un site web de type WordPress
* un cloud : bureau virtuel pour stocker des fichiers/calendriers/contacts et partager avec vos connaissances
Une fois que vous aurez répondu à ce mail, votre demande sera traitée manuellement.
"
fi
# Send the welcome mail (queued into ${CMD_MSG}); body is user-facing French.
MAIL_KAZ="Bonjour,
Bienvenue chez KAZ!
Vous disposez de $((${NB_SERVICES_BASE} + ${NB_SERVICES_DEDIES})) services kaz avec authentification :
* une messagerie classique : ${httpProto}://${URL_WEBMAIL}
* une messagerie instantanée pour discuter au sein d'équipes : ${httpProto}://${URL_AGORA}
Votre email et identifiant pour tous ces services : ${EMAIL_SOUHAITE}
Le mot de passe : ${PASSWORD}
Pour changer votre mot de passe de messagerie, c'est ici: ${httpProto}://${URL_MDP}
Si vous avez perdu votre mot de passe, c'est ici: ${httpProto}://${URL_MDP}/?action=sendtoken
Vous pouvez accéder à votre messagerie :
* soit depuis votre webmail : ${httpProto}://${URL_WEBMAIL}
* soit depuis votre bureau virtuel : ${httpProto}://${URL_NC}
* soit depuis un client de messagerie comme thunderbird
${MESSAGE_MAIL_ORGA_3}
Vous avez quelques docs intéressantes sur le wiki de kaz:
* Migrer son site internet wordpress vers kaz
https://wiki.kaz.bzh/wordpress/start#migrer_son_site_wordpress_vers_kaz
* Migrer sa messagerie vers kaz
https://wiki.kaz.bzh/messagerie/gmail/start
* Démarrer simplement avec son cloud
https://wiki.kaz.bzh/nextcloud/start
Votre quota est de ${QUOTA}GB. Si vous souhaitez plus de place pour vos fichiers ou la messagerie, faites-nous signe !
Pour accéder à la messagerie instantanée et communiquer avec les membres de votre équipe ou ceux de kaz : ${httpProto}://${URL_AGORA}/login
${MESSAGE_MAIL_ORGA_2}
Enfin, vous disposez de tous les autres services KAZ où l'authentification n'est pas nécessaire : ${httpProto}://${URL_SITE}
En cas de soucis, n'hésitez pas à poser vos questions sur le canal 'Une question ? un soucis' de l'agora dispo ici : ${httpProto}://${URL_AGORA}
Si vous avez besoin d'accompagnement pour votre site, votre cloud, votre compta, votre migration de messagerie,... nous proposons des formations mensuelles gratuites. Si vous souhaitez être accompagné par un professionnel, nous pouvons vous donner une liste de pros, référencés par KAZ.
À bientôt ;)
La collégiale de KAZ. "
echo "docker exec -i mailServ mailx -a 'Content-Type: text/plain; charset=\"UTF-8\"' -r contact@kaz.bzh -s \"KAZ: confirmation d'inscription\" ${EMAIL_SOUHAITE} ${EMAIL_SECOURS} << EOF
${MAIL_KAZ}
EOF" | tee -a "${CMD_MSG}"
# Send the registration summary to the contact address.
MAIL_KAZ="*****POST AUTOMATIQUE******
Hello,
${NOM} ${PRENOM} vient d'être inscrit avec l'email ${EMAIL_SOUHAITE}
quota : ${QUOTA}GB
NC_BASE : ${service[NC_BASE]}
groupe NC base : ${GROUPE_NC_BASE}
équipe agora base : ${EQUIPE_AGORA}
email de secours : ${EMAIL_SECOURS}
ORGA : ${ORGA}
ADMIN_ORGA : ${service[ADMIN_ORGA]}
NC_ORGA : ${service[NC_ORGA]}
PAHEKO_ORGA : ${service[PAHEKO_ORGA]}
WP_ORGA : ${service[WP_ORGA]}
AGORA_ORGA : ${service[AGORA_ORGA]}
WIKI_ORGA : ${service[WIKI_ORGA]}
bisou!"
echo "docker exec -i mailServ mailx -a 'Content-Type: text/plain; charset=\"UTF-8\"' -r contact@kaz.bzh -s \"KAZ: confirmation d'inscription\" ${EMAIL_CONTACT} << EOF
${MAIL_KAZ}
EOF" | tee -a "${CMD_MSG}"
echo " # on envoie la confirmation d'inscription sur l'agora " | tee -a "${CMD_MSG}"
echo "docker exec -i mattermostServ bin/mmctl post create kaz:Creation-Comptes --message \"${MAIL_KAZ}\"" | tee -a "${CMD_MSG}"
# end of the per-user loop
done <<< "${ALL_LINES}"
# Queue proxy refresh commands when at least one orga was created:
# start the new containers, then regenerate and reload every proxy compose.
if [[ -n "${ALL_ORGA}" ]]; then
    printf '%s\n' "sleep 2" | tee -a "${CMD_PROXY}"
    printf '%s\n' "${KAZ_BIN_DIR}/container.sh start ${ALL_ORGA}" | tee -a "${CMD_PROXY}"
    for proxyComp in "${availableProxyComposes[@]}"; do
        printf '%s\n' "cd \"${KAZ_COMP_DIR}/${proxyComp}/\"; ./proxy-gen.sh; docker-compose up -d; ./reload.sh " | tee -a "${CMD_PROXY}"
    done
fi
###########################
# Run the generated command files (in dependency order)
###########################
if [ "${SIMULATION}" == "NO" ]; then
echo "on exécute"
"${CMD_LOGIN}"
# Wait a few seconds so the mailboxes really exist before continuing
# (mailbox read issue: to investigate).
# Extended to 20s: the sympa docker does not always know the new kaz email yet.
echo "on attend 20s pour que la création des emails soit certaine"
sleep 20
"${CMD_SYMPA}"
"${CMD_ORGA}"
"${CMD_PROXY}"
"${CMD_FIRST}"
"${CMD_INIT}"
"${CMD_PAHEKO}"
"${CMD_MSG}"
else
echo "Aucune commande n'a été lancée : Possibilité de le faire à la main. cf ${KAZ_ROOT}/tmp/${RACINE}_cmds_to_run-*.sh"
fi
# END

7
bin2/cron-cloud.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/bin/bash
# Run Nextcloud's background-job processor (cron.php) as www-data in every
# running Nextcloud container (shared instance and per-orga instances).
# Fixed: names are now obtained with --format '{{.Names}}' instead of
# awk'ing positional column 12 of `docker ps`, which shifts with the
# table layout; the container name is also quoted.
for cloud in $(docker ps --format '{{.Names}}' | grep -i nextcloudServ)
do
    docker exec -u www-data "$cloud" php cron.php
done

135
bin2/dns.sh Executable file
View File

@@ -0,0 +1,135 @@
#!/bin/bash
# koi: manage DNS records on AlwaysData
# ki: fanch&gaël&fab
# kan: 06/04/2025
# doc: https://api.alwaysdata.com/v1/record/doc/
# doc: https://help.alwaysdata.com/fr/api/
#
# Fixed: the shebang was "#/bin/bash" (missing '!'), so bash was never
# explicitly selected; the option loop now iterates over "$@" (quoted)
# to keep argument boundaries intact.
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
cd "${KAZ_ROOT}"
export PRG="$0"
export IP="127.0.0.1"
export ETC_HOSTS="/etc/hosts"
# no more export in .env
# NOTE(review): imports every shell variable whose name contains "domain="
# from the current environment — fragile; confirm it only matches ${domain}.
export $(set | grep "domain=")
# TODO: fetch the list of kaz services instead of hard-coding them
declare -a forbidenName
forbidenName=(${calcHost} calc ${cloudHost} bureau ${dateHost} date ${dokuwikiHost} dokuwiki ${fileHost} file ${ldapHost} ${pahekoHost} ${gitHost} ${gravHost} ${matterHost} ${officeHost} collabora ${padHost} ${sympaHost} listes ${webmailHost} ${wordpressHost} www ${vigiloHost} form)
export FORCE="NO"
export CMD=""
export SIMU=""
usage(){
    echo "Usage: ${PRG} list [sub-domain...]"
    echo " ${PRG} [-n] [-f] {add/del} sub-domain..."
    echo " -h help"
    echo " -n simulation"
    echo " -f force protected domain"
    exit 1
}
# API credentials: ALWAYSDATA_TOKEN / ALWAYSDATA_ACCOUNT / ALWAYSDATA_API
. "${KAZ_KEY_DIR}/env-alwaysdata"
if [[ -z "${ALWAYSDATA_TOKEN}" ]] ; then
    echo "no ALWAYSDATA_TOKEN set in ${KAZ_KEY_DIR}/env-alwaysdata"
    usage
fi
# Numeric id of ${domain} on the AlwaysData account (used by every record call).
DOMAIN_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" ${ALWAYSDATA_API}/domain/?name=${domain} | jq '.[0].id')
# Flags may precede the sub-command; remaining args are left for it.
for ARG in "$@"
do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            ;;
        '-f' )
            shift
            export FORCE="YES"
            ;;
        '-n' )
            shift
            export SIMU="echo"
            ;;
        'list'|'add'|'del' )
            shift
            CMD="${ARG}"
            break
            ;;
        * )
            usage
            ;;
    esac
done
if [ -z "${CMD}" ]; then
    usage
fi
# list [sub-domain...] — print the "name:value" pairs of the domain's CNAME
# records, optionally filtered by name through the API query string.
list(){
    local query="$*"
    local records
    records=$(curl -s -X GET --basic \
        --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" \
        "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}&type=CNAME&name=${query}" \
        | jq '.[] | "\(.name):\(.value)"')
    # Intentionally unquoted: collapses the result onto one whitespace-joined
    # line, exactly as the original did.
    echo ${records}
}
# Snapshot every DNS record of the domain into /root/dns/ before any
# mutation, so a bad add/del can be recovered by hand.
# Honors ${SIMU}: when set to "echo", only prints the curl command.
saveDns () {
mkdir -p /root/dns
${SIMU} curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}" -o /root/dns/dns_save_$(date +'%Y%m%d%H%M%S')
}
# badName NAME — succeed (return 0) when NAME is empty, or when it is one of
# the protected service names and -f (FORCE=YES) was not given.
# Callers then refuse to touch the record.
badName(){
    local candidate="$1" reserved
    if [[ -z "${candidate}" ]]; then
        return 0
    fi
    for reserved in "${forbidenName[@]}"; do
        if [[ "${reserved}" == "${candidate}" && "${FORCE}" == "NO" ]]; then
            return 0
        fi
    done
    return 1
}
# add SUB... — create a CNAME record pointing each SUB to ${site}.${domain}.
# Protected names are skipped unless -f was given; the zone is snapshotted
# first (saveDns). Honors ${SIMU}.
# Fixed: "$@" is now quoted (saveDns $@ / for ARG in $@ word-split), and the
# final message uses ${ADDED[*]} (SC2145) — output is unchanged.
add(){
    if [ $# -lt 1 ]; then
        exit
    fi
    saveDns "$@"
    declare -a ADDED
    for ARG in "$@"
    do
        if badName "${ARG}" ; then
            echo "can't manage '${ARG}'. Use -f option"
            continue
        fi
        ${SIMU} curl -s -X POST -d "{\"domain\":\"${DOMAIN_ID}\", \"type\":\"CNAME\", \"name\":\"${ARG}\", \"value\":\"${site}.${domain}\"}" --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/"
        ADDED+=("${ARG}")
    done
    echo "Domains added to ${domain}: ${ADDED[*]}"
}
# del SUB... — delete the CNAME record of each SUB (protected names skipped
# unless -f). The record id is looked up first; only the DELETE honors
# ${SIMU} (the lookup GET is read-only and runs even in simulation).
# Fixed: quoted "$@" and ${REMOVED[*]} — output is unchanged.
del(){
    if [ $# -lt 1 ]; then
        exit
    fi
    saveDns "$@"
    declare -a REMOVED
    for ARG in "$@"
    do
        if badName "${ARG}" ; then
            echo "can't manage '${ARG}'. Use -f option"
            continue
        fi
        RECORD_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?name=${ARG}&type=CNAME&domain=${DOMAIN_ID}" | jq ".[] | select(.name==\"${ARG}\").id")
        ${SIMU} curl -s -X DELETE --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/${RECORD_ID}/"
        REMOVED+=("${ARG}")
    done
    echo "Domains removed from ${domain}: ${REMOVED[*]}"
}
${CMD} $*

135
bin2/dns_alwaysdata.sh Executable file
View File

@@ -0,0 +1,135 @@
#!/bin/bash
# koi: manage DNS records on AlwaysData
# ki: fanch&gaël&fab
# kan: 06/04/2025
# doc: https://api.alwaysdata.com/v1/record/doc/
# doc: https://help.alwaysdata.com/fr/api/
#
# Fixed: the shebang was "#/bin/bash" (missing '!'), so bash was never
# explicitly selected; the option loop now iterates over "$@" (quoted)
# to keep argument boundaries intact.
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
cd "${KAZ_ROOT}"
export PRG="$0"
export IP="127.0.0.1"
export ETC_HOSTS="/etc/hosts"
# no more export in .env
# NOTE(review): imports every shell variable whose name contains "domain="
# from the current environment — fragile; confirm it only matches ${domain}.
export $(set | grep "domain=")
# TODO: fetch the list of kaz services instead of hard-coding them
declare -a forbidenName
forbidenName=(${calcHost} calc ${cloudHost} bureau ${dateHost} date ${dokuwikiHost} dokuwiki ${fileHost} file ${ldapHost} ${pahekoHost} ${gitHost} ${gravHost} ${matterHost} ${officeHost} collabora ${padHost} ${sympaHost} listes ${webmailHost} ${wordpressHost} www ${vigiloHost} form)
export FORCE="NO"
export CMD=""
export SIMU=""
usage(){
    echo "Usage: ${PRG} list [sub-domain...]"
    echo " ${PRG} [-n] [-f] {add/del} sub-domain..."
    echo " -h help"
    echo " -n simulation"
    echo " -f force protected domain"
    exit 1
}
# API credentials: ALWAYSDATA_TOKEN / ALWAYSDATA_ACCOUNT / ALWAYSDATA_API
. "${KAZ_KEY_DIR}/env-alwaysdata"
if [[ -z "${ALWAYSDATA_TOKEN}" ]] ; then
    echo "no ALWAYSDATA_TOKEN set in ${KAZ_KEY_DIR}/env-alwaysdata"
    usage
fi
# Numeric id of ${domain} on the AlwaysData account (used by every record call).
DOMAIN_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" ${ALWAYSDATA_API}/domain/?name=${domain} | jq '.[0].id')
# Flags may precede the sub-command; remaining args are left for it.
for ARG in "$@"
do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            ;;
        '-f' )
            shift
            export FORCE="YES"
            ;;
        '-n' )
            shift
            export SIMU="echo"
            ;;
        'list'|'add'|'del' )
            shift
            CMD="${ARG}"
            break
            ;;
        * )
            usage
            ;;
    esac
done
if [ -z "${CMD}" ]; then
    usage
fi
# list [sub-domain...] — print the "name:value" pairs of the domain's CNAME
# records, optionally filtered by name through the API query string.
list(){
    local query="$*"
    local records
    records=$(curl -s -X GET --basic \
        --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" \
        "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}&type=CNAME&name=${query}" \
        | jq '.[] | "\(.name):\(.value)"')
    # Intentionally unquoted: collapses the result onto one whitespace-joined
    # line, exactly as the original did.
    echo ${records}
}
# Snapshot every DNS record of the domain into /root/dns/ before any
# mutation, so a bad add/del can be recovered by hand.
# Honors ${SIMU}: when set to "echo", only prints the curl command.
saveDns () {
mkdir -p /root/dns
${SIMU} curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}" -o /root/dns/dns_save_$(date +'%Y%m%d%H%M%S')
}
# badName NAME — succeed (return 0) when NAME is empty, or when it is one of
# the protected service names and -f (FORCE=YES) was not given.
# Callers then refuse to touch the record.
badName(){
    local candidate="$1" reserved
    if [[ -z "${candidate}" ]]; then
        return 0
    fi
    for reserved in "${forbidenName[@]}"; do
        if [[ "${reserved}" == "${candidate}" && "${FORCE}" == "NO" ]]; then
            return 0
        fi
    done
    return 1
}
# add SUB... — create a CNAME record pointing each SUB to ${site}.${domain}.
# Protected names are skipped unless -f was given; the zone is snapshotted
# first (saveDns). Honors ${SIMU}.
# Fixed: "$@" is now quoted (saveDns $@ / for ARG in $@ word-split), and the
# final message uses ${ADDED[*]} (SC2145) — output is unchanged.
add(){
    if [ $# -lt 1 ]; then
        exit
    fi
    saveDns "$@"
    declare -a ADDED
    for ARG in "$@"
    do
        if badName "${ARG}" ; then
            echo "can't manage '${ARG}'. Use -f option"
            continue
        fi
        ${SIMU} curl -s -X POST -d "{\"domain\":\"${DOMAIN_ID}\", \"type\":\"CNAME\", \"name\":\"${ARG}\", \"value\":\"${site}.${domain}\"}" --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/"
        ADDED+=("${ARG}")
    done
    echo "Domains added to ${domain}: ${ADDED[*]}"
}
# del SUB... — delete the CNAME record of each SUB (protected names skipped
# unless -f). The record id is looked up first; only the DELETE honors
# ${SIMU} (the lookup GET is read-only and runs even in simulation).
# Fixed: quoted "$@" and ${REMOVED[*]} — output is unchanged.
del(){
    if [ $# -lt 1 ]; then
        exit
    fi
    saveDns "$@"
    declare -a REMOVED
    for ARG in "$@"
    do
        if badName "${ARG}" ; then
            echo "can't manage '${ARG}'. Use -f option"
            continue
        fi
        RECORD_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?name=${ARG}&type=CNAME&domain=${DOMAIN_ID}" | jq ".[] | select(.name==\"${ARG}\").id")
        ${SIMU} curl -s -X DELETE --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/${RECORD_ID}/"
        REMOVED+=("${ARG}")
    done
    echo "Domains removed from ${domain}: ${REMOVED[*]}"
}
${CMD} $*

209
bin2/dns_gandi.sh Executable file
View File

@@ -0,0 +1,209 @@
#!/bin/bash
# list/add/delete a sub-domain (Gandi API, or /etc/hosts for kaz.local).
# Fixed: the option loop now iterates over "$@" (quoted) to keep
# argument boundaries intact.
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
cd "${KAZ_ROOT}"
export PRG="$0"
export IP="127.0.0.1"
export ETC_HOSTS="/etc/hosts"
# no more export in .env
export $(set | grep "domain=")
declare -a forbidenName
forbidenName=(${calcHost} calc ${cloudHost} bureau ${dateHost} date ${dokuwikiHost} dokuwiki ${fileHost} file ${ldapHost} ${pahekoHost} ${gitHost} ${gravHost} ${matterHost} ${officeHost} collabora ${padHost} ${sympaHost} listes ${webmailHost} ${wordpressHost} www ${vigiloHost} form)
export FORCE="NO"
export CMD=""
export SIMU=""
usage(){
    echo "Usage: ${PRG} list [sub-domain...]"
    echo " ${PRG} [-n] [-f] {add/del} sub-domain..."
    echo " -h help"
    echo " -n simulation"
    echo " -f force protected domain"
    exit 1
}
# Flags may precede the sub-command; remaining args are left for it.
for ARG in "$@"
do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            ;;
        '-f' )
            shift
            export FORCE="YES"
            ;;
        '-n' )
            shift
            export SIMU="echo"
            ;;
        'list'|'add'|'del' )
            shift
            CMD="${ARG}"
            break
            ;;
        * )
            usage
            ;;
    esac
done
if [ -z "${CMD}" ]; then
    usage
fi
# API credentials: GANDI_KEY / GANDI_API
. "${KAZ_KEY_DIR}/env-gandi"
if [[ -z "${GANDI_KEY}" ]] ; then
    echo
    echo "no GANDI_KEY set in ${KAZ_KEY_DIR}/env-gandi"
    usage
fi
# waitNet — block until the Gandi API answers HTTP 200 (polls every 5s on
# 503 etc.). No-op for the local test domain kaz.local.
# NOTE(review): once the API becomes reachable again after an outage this
# exits the whole script instead of returning — confirm this is intended.
waitNet () {
if [[ "${domain}" = "kaz.local" ]]; then
return
fi
### wait when error code 503
if [[ $(curl -H "authorization: Apikey ${GANDI_KEY}" --connect-timeout 2 -s -D - "${GANDI_API}" -o /dev/null 2>/dev/null | head -n1) != *200* ]]; then
echo "DNS not available. Please wait..."
while [[ $(curl -H "authorization: Apikey ${GANDI_KEY}" --connect-timeout 2 -s -D - "${GANDI_API}" -o /dev/null 2>/dev/null | head -n1) != *200* ]]
do
sleep 5
done
exit
fi
}
# list [SUB...] — print "name:value" pairs for the zone.
# For the local kaz.local domain the data comes from /etc/hosts; otherwise
# the Gandi records are fetched and filtered, then optionally grepped per SUB.
list(){
    if [[ "${domain}" = "kaz.local" ]]; then
        # fix: the sed group was "[0-9a-z.-]${domain}" (exactly ONE leading
        # character), which almost never matched, leaving the IP prefix in the
        # output; "*" restores the intended "strip the IP, keep the host name".
        grep --perl-regex "^${IP}\s.*${domain}" "${ETC_HOSTS}" 2> /dev/null | sed -e "s|^${IP}\s*\([0-9a-z.-]*${domain}\)$|\1|g"
        return
    fi
    waitNet
    # temp file removed on exit (trap set before mktemp: rm -f "" is harmless)
    trap 'rm -f "${TMPFILE}"' EXIT
    TMPFILE="$(mktemp)" || exit 1
    if [[ -n "${SIMU}" ]] ; then
        ${SIMU} curl -X GET "${GANDI_API}/records" -H "authorization: Apikey ${GANDI_KEY}"
    else
        curl -X GET "${GANDI_API}/records" -H "authorization: Apikey ${GANDI_KEY}" 2>/dev/null | \
            sed "s/,{/\n/g" | \
            sed 's/.*rrset_name":"\([^"]*\)".*rrset_values":\["\([^"]*\)".*/\1:\2/g'| \
            grep -v '^[_@]'| \
            grep -e ":${domain}\.*$" -e ":prod[0-9]*$" > ${TMPFILE}
    fi
    if [ $# -lt 1 ]; then
        cat ${TMPFILE}
    else
        for ARG in "$@"
        do
            grep -- "${ARG}.*:" "${TMPFILE}"
        done
    fi
}
# saveDns SUB... — sanity-check arguments (sub-domains must be given without
# a .local/.bzh/.dev suffix) then snapshot the zone before any change.
# No remote snapshot for the local test domain. Honors ${SIMU}.
# Fixed: "for ARG in $@" is now quoted to keep argument boundaries.
# NOTE(review): the leading "." in these =~ patterns is unescaped (matches
# any character) — confirm that is acceptable.
saveDns () {
    for ARG in "$@" ; do
        if [[ "${ARG}" =~ .local$ ]] ; then
            echo "${PRG}: old fasion style (remove .local at the end)"
            usage;
        fi
        if [[ "${ARG}" =~ .bzh$ ]] ; then
            echo "${PRG}: old fasion style (remove .bzh at the end)"
            usage;
        fi
        if [[ "${ARG}" =~ .dev$ ]] ; then
            echo "${PRG}: old fasion style (remove .dev at the end)"
            usage;
        fi
    done
    if [[ "${domain}" = "kaz.local" ]]; then
        return
    fi
    waitNet
    ${SIMU} curl -X POST "${GANDI_API}/snapshots" -H "authorization: Apikey ${GANDI_KEY}" 2>/dev/null
}
# badName NAME — succeed (return 0) when NAME is empty, or when it is one of
# the protected service names and -f (FORCE=YES) was not given.
# Callers then refuse to touch the record.
badName(){
    local candidate="$1" reserved
    if [[ -z "${candidate}" ]]; then
        return 0
    fi
    for reserved in "${forbidenName[@]}"; do
        if [[ "${reserved}" == "${candidate}" && "${FORCE}" == "NO" ]]; then
            return 0
        fi
    done
    return 1
}
# add SUB... — register each SUB: a CNAME on Gandi for real domains, or a
# new alias on the ${IP} line of /etc/hosts for the local kaz.local domain.
# Protected names are skipped unless -f. Honors ${SIMU}.
# Fixed: quoted "$@" and ${ADDED[*]} — output unchanged; hosts-file sed
# commands kept verbatim.
add(){
    if [ $# -lt 1 ]; then
        exit
    fi
    saveDns "$@"
    declare -a ADDED
    for ARG in "$@"
    do
        if badName "${ARG}" ; then
            echo "can't manage '${ARG}'. Use -f option"
            continue
        fi
        case "${domain}" in
            kaz.local )
                # already present: stop processing further names
                if grep -q --perl-regex "^${IP}.*[ \t]${ARG}.${domain}" "${ETC_HOSTS}" 2> /dev/null ; then
                    break
                fi
                if grep -q --perl-regex "^${IP}[ \t]" "${ETC_HOSTS}" 2> /dev/null ; then
                    # append the alias to the first existing ${IP} line
                    ${SIMU} sudo sed -i -e "0,/^${IP}[ \t]/s/^\(${IP}[ \t]\)/\1${ARG}.${domain} /g" "${ETC_HOSTS}"
                else
                    # no ${IP} line yet: append one at the end of the file
                    ${SIMU} sudo sed -i -e "$ a ${IP}\t${ARG}.${domain}" "${ETC_HOSTS}" 2> /dev/null
                fi
                ;;
            *)
                ${SIMU} curl -X POST "${GANDI_API}/records" -H "authorization: Apikey ${GANDI_KEY}" -H 'content-type: application/json' -d '{"rrset_type":"CNAME", "rrset_name":"'${ARG}'", "rrset_values":["'${site}'"]}'
                echo
                ;;
        esac
        ADDED+=("${ARG}")
    done
    echo "Domains added to ${domain}: ${ADDED[*]}"
}
# del SUB... — remove each SUB: the Gandi record for real domains, or the
# alias on the ${IP} line of /etc/hosts for the local kaz.local domain.
# Protected names are skipped unless -f. Honors ${SIMU}.
# Fixed: "if !grep" was parsed as a single word (command "!grep", not found),
# so the "not present -> break" guard never fired and the sudo sed always ran;
# "! grep" negates the grep result as intended. Also quoted "$@"/${REMOVED[*]}.
del(){
    if [ $# -lt 1 ]; then
        exit
    fi
    saveDns "$@"
    declare -a REMOVED
    for ARG in "$@"
    do
        if badName "${ARG}" ; then
            echo "can't manage '${ARG}'. Use -f option"
            continue
        fi
        case "${domain}" in
            kaz.local )
                if ! grep -q --perl-regex "^${IP}.*[ \t]${ARG}.${domain}" "${ETC_HOSTS}" 2> /dev/null ; then
                    break
                fi
                ${SIMU} sudo sed -i -e "/^${IP}[ \t]*${ARG}.${domain}[ \t]*$/d" \
                    -e "s|^\(${IP}.*\)[ \t]${ARG}.${domain}|\1|g" "${ETC_HOSTS}"
                ;;
            * )
                ${SIMU} curl -X DELETE "${GANDI_API}/records/${ARG}" -H "authorization: Apikey ${GANDI_KEY}"
                echo
                ;;
        esac
        REMOVED+=("${ARG}")
    done
    echo "Domains removed from ${domain}: ${REMOVED[*]}"
}
#echo "CMD: ${CMD} $*"
# Dispatch to the selected sub-command with the remaining arguments;
# "$@" preserves argument boundaries (the former unquoted $* re-split them).
${CMD} "$@"

176
bin2/dynDNS.sh Executable file
View File

@@ -0,0 +1,176 @@
#!/bin/bash
# Dynamic DNS: keep the A record of ${site} in sync with the public IP of
# this host (AlwaysData API; the old Gandi code is kept commented).
# Typical start: nohup /kaz/bin/dynDNS.sh &
# Fixed: the option loop now iterates over "$@" (quoted).
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
# no more export in .env
export $(set | grep "domain=")
cd "${KAZ_ROOT}"
export PRG="$0"
export MYHOST="${site}"
# URL returning this host's public IP as seen from outside
MYIP_URL="https://kaz.bzh/myip.php"
DNS_IP=""
DELAI_WAIT=10 # DNS busy: retry interval (s)
DELAI_GET=5 # minimum delay between two IP lookups (s)
DELAI_CHANGE=3600 # propagation delay after a change (1h)
DELAI_NO_CHANGE=300 # polling period when nothing changed (5 min)
BOLD='\e[1m'
RED='\e[0;31m'
GREEN='\e[0;32m'
YELLOW='\e[0;33m'
BLUE='\e[0;34m'
MAGENTA='\e[0;35m'
CYAN='\e[0;36m'
NC='\e[0m' # No Color
NL='
'
export VERBOSE=""
export SIMU=""
usage(){
    echo "Usage: ${PRG} list [sub-domain...]"
    echo " -h help"
    echo " -v verbose"
    echo " -n simulation"
    exit 1
}
#. "${KAZ_KEY_DIR}/env-gandi"
. "${KAZ_KEY_DIR}/env-alwaysdata"
if [[ -z "${ALWAYSDATA_TOKEN}" ]] ; then
    echo "no ALWAYSDATA_TOKEN set in ${KAZ_KEY_DIR}/env-alwaysdata"
    usage
fi
# Numeric id of ${domain} on the AlwaysData account.
DOMAIN_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" ${ALWAYSDATA_API}/domain/?name=${domain} | jq '.[0].id')
if [[ -z "${DOMAIN_ID}" ]] ; then
    echo "no DOMAIN_ID give by alwaysdata"
    usage
fi
# if [[ -z "${GANDI_KEY}" ]] ; then
#     echo
#     echo "no GANDI_KEY set in ${KAZ_KEY_DIR}/env-gandi"
#     usage
#     exit
# fi
for ARG in "$@"
do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            ;;
        '-v' )
            shift
            export VERBOSE=":"
            ;;
        '-n' )
            shift
            export SIMU="echo"
            ;;
        * )
            usage
            ;;
    esac
done
# log MESSAGE... — timestamped (blue) log line on stdout.
log () {
    local stamp
    stamp=$(date +%d-%m-%Y-%H-%M-%S)
    echo -e "${BLUE}${stamp}${NC} : $*"
}
# simu MESSAGE... — timestamped (yellow) trace of a simulated command.
simu () {
    local stamp
    stamp=$(date +%d-%m-%Y-%H-%M-%S)
    echo -e "${YELLOW}${stamp}${NC} : $*"
}
# cmdWait — probe the AlwaysData API (response headers only, 2s timeout) so
# waitNet can detect availability; old Gandi probe kept for reference.
# NOTE(review): ${TARGET} is never assigned in this script, so the name
# filter is empty — confirm this is intended.
cmdWait () {
#ex gandi
#curl -H "authorization: Apikey ${GANDI_KEY}" --connect-timeout 2 -s -D - -o /dev/null "${GANDI_API}" 2>/dev/null
curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" --connect-timeout 2 -D - -o /dev/null "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}&type=CNAME&name=${TARGET}" 2>/dev/null
}
# waitNet — block until the DNS API answers HTTP 200 (polls via cmdWait).
# In verbose mode each retry traces the probe command.
# NOTE(review): once the API becomes reachable again after an outage this
# exits the whole script instead of returning — confirm this is intended.
waitNet () {
### wait when error code 503
if [[ $(cmdWait | head -n1) != *200* ]]; then
log "DNS not available. Please wait..."
while [[ $(cmdWait | head -n1) != *200* ]]; do
[[ -z "${VERBOSE}" ]] || simu curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" --connect-timeout 2 -D - -o /dev/null "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}&type=CNAME&name=${TARGET}"
sleep "${DELAI_WAIT}"
done
exit
fi
}
# getDNS — print the current value (IP) of the A record for ${MYHOST}.
# Old Gandi implementation kept commented for reference.
# NOTE(review): under -n (SIMU=echo) this echoes the curl command instead of
# querying, so the caller's DNS_IP would hold that text — confirm intended.
getDNS () {
# curl -s -X GET "${GANDI_API}/records" -H "authorization: Apikey ${GANDI_KEY}"|
# sed "s/,{/\n/g"|
# sed 's/.*rrset_name":"\([^"]*\)".*rrset_values":\["\([^"]*\)".*/\1:\2/g'|
# grep -e "^${MYHOST}:"|
# sed "s/^${MYHOST}://g" |
# tr -d '\n\t\r '
${SIMU} curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}&type=A&name=${MYHOST}" | jq '.[] | "\(.value)"' | tr -d '"'
}
# saveDns — snapshot every DNS record of the domain into /root/dns/ before
# any mutation. Honors ${SIMU}.
saveDns () {
mkdir -p /root/dns
${SIMU} curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?domain=${DOMAIN_ID}" -o /root/dns/dns_save_$(date +'%Y%m%d%H%M%S')
}
# setDNS — create the A record ${MYHOST} -> ${IP} (zone snapshotted first).
# Old Gandi call kept commented for reference. Honors ${SIMU}.
setDNS () {
saveDns
# curl -s -X POST "${GANDI_API}/records" -H "authorization: Apikey ${GANDI_KEY}" -H 'content-type: application/json' -d '{"rrset_type":"A", "rrset_name":"'${MYHOST}'", "rrset_values":["'${IP}'"]}'
${SIMU} curl -s -X POST -d "{\"domain\":\"${DOMAIN_ID}\", \"type\":\"A\", \"name\":\"${MYHOST}\", \"value\":\"${IP}\"}" --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/"
}
# Main loop: poll the public IP and keep the ${MYHOST} A record in sync.
while :; do
    sleep "${DELAI_GET}"
    # Public IP as seen from outside (first dotted quad in the page).
    IP=$(curl -s "${MYIP_URL}" | grep -Eo '([0-9]{1,3}\.){3}[0-9]{1,3}' | tr -d '\n\t\r ')
    if ! [[ ${IP} =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
        # fix: the log message read "BAB IP" (typo for "BAD IP")
        log "BAD IP ${IP}" ; continue
    fi
    if [ -z "${DNS_IP}" ]; then
        # DNS_IP not initialised yet: fetch the current record
        waitNet
        DNS_IP=$(getDNS)
        if [ -z "${DNS_IP}" ]; then
            # First time the site goes to production: create the record
            log "set ${MYHOST} : ${IP}"
            setDNS
            DNS_IP=$(getDNS)
            log "DNS set ${MYHOST}:${IP} (=${DNS_IP})"
            sleep "${DELAI_CHANGE}"
            continue
        fi
    fi
    if [ "${DNS_IP}" != "${IP}" ]; then
        log "${MYHOST} : ${DNS_IP} must change to ${IP}"
        # Address changed: delete the old A record then recreate it
        waitNet
        #curl -s -X DELETE "${GANDI_API}/records/${MYHOST}" -H "authorization: Apikey ${GANDI_KEY}"
        RECORD_ID=$(curl -s -X GET --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/?name=${MYHOST}&type=A&domain=${DOMAIN_ID}" | jq ".[] | select(.name==\"${MYHOST}\").id")
        ${SIMU} curl -s -X DELETE --basic --user "${ALWAYSDATA_TOKEN} account=${ALWAYSDATA_ACCOUNT}:" "${ALWAYSDATA_API}/record/${RECORD_ID}/"
        setDNS
        DNS_IP=$(getDNS)
        log "DNS reset ${MYHOST}:${IP} (=${DNS_IP})"
        sleep "${DELAI_CHANGE}"
    else
        log "OK ${MYHOST}:${DNS_IP} / ${IP}"
        sleep ${DELAI_NO_CHANGE}
    fi
done

66
bin2/envoiMails.sh Executable file
View File

@@ -0,0 +1,66 @@
#!/bin/bash
# when: 09/09/2022
# what: send a mail to a list of addresses (without going through sympa on listes.kaz.bzh)
# who : fab
# Pull in all shared variables and secrets.
# Directories are resolved relative to the parent folder (/kaz in our setup).
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# SIMULATION=NO runs the generated script immediately; anything else only writes it.
SIMULATION=NO
# Generated batch script: one mailx invocation per recipient.
CMD="/tmp/envoiMail_cmds_to_run.sh"
echo "#!/bin/bash" > ${CMD} && chmod +x ${CMD}
#################################################################################################################
# Mail body (runtime string — do not reformat).
MAIL_KAZ="
KAZ, association morbihannaise, propose de \"dégoogliser\" linternet avec des solutions et services numériques libres alternatifs à ceux des GAFAM. Elle invite les habitants et les élus de Vannes et de sa région à une réunion publique dinformation et déchange :
Le jeudi 22 septembre 2022 à 18 heures
à la Maison des associations, 31 rue Guillaume Le Bartz à Vannes.
Cette invitation est destinée à toute personne sensible aux enjeux du numérique, aux risques pour la démocratie et les libertés, à sa participation au dérèglement climatique et à lépuisement des ressources de notre planète.
Nous dirons qui nous sommes, quelles sont nos valeurs et quels sont concrètement les solutions et services que nous proposons, leurs conditions daccès et laccompagnement utile pour leur prise en main.
Les premières pierres de KAZ ont été posées voilà bientôt deux ans, en pleine pandémie de la COVID, par sept citoyens qui ont voulu répondre à lappel de Framasoft de dégoogliser linternet. Ne plus se lamenter sur la puissance des GAFAM, mais proposer des solutions et des services numériques alternatifs à ceux de Google, Amazon, Facebook, Apple et Microsoft en sappuyant sur les fondamentaux Sobre, Libre, Éthique et Local.
A ce jour, près de 200 particuliers ont ouvert une adresse @kaz.bz, plus de 80 organisations ont souscrit au bouquet de services de KAZ et près de 800 collaborateurs dorganisations utilisatrices des services de KAZ participent peu ou prou au réseau de KAZ.
Beaucoup de services sont gratuits et accessibles sur le site https://kaz.bzh. Dautres demandent louverture dun compte moyennant une petite participation financière de 10€ par an pour les particuliers et de 30€ par an pour les organisations. Ceci est permis par le bénévolat des membres de la collégiale qui dirigent lassociation et administrent les serveurs et les services.
A ce stade, de nombreuses questions se posent à KAZ. Quelle ambition de couverture de ses services auprès des particuliers et des organisations sur son territoire, le Morbihan ? Quel accompagnement dans la prise en main des outils ? Quels nouveaux services seraient utiles ?
Nous serions heureux de votre participation à notre réunion publique du 22 septembre d'une durée totale de 2h :
* Présentation valeurs / contexte (15mn)
* Présentation outils (45mn)
* Questions / Réponses (1h)
En restant à votre disposition,
La Collégiale de KAZ
"
#################################################################################################################
# One recipient address per line.
FIC_MAILS="/tmp/fic_mails"
# NOTE(review): 'read' without -r mangles backslashes in addresses — confirm
# and consider 'read -r'; also ${FIC_MAILS} must exist or the loop aborts.
while read EMAIL_CIBLE
do
# Append to ${CMD} a docker/mailx command that feeds the body via heredoc.
echo "docker exec -i mailServ mailx -a 'Content-Type: text/plain; charset=\"UTF-8\"' -r contact@kaz.bzh -s \"Invitation rencontre KAZ jeudi 22 septembre à 18h00 à la Maison des assos à Vannes\" ${EMAIL_CIBLE} << EOF
${MAIL_KAZ}
EOF" | tee -a ${CMD}
# Throttle: 2 s between sends.
echo "sleep 2" | tee -a ${CMD}
done < ${FIC_MAILS}
# Any commands to run?
if [ "${SIMULATION}" == "NO" ];then
echo "on exécute"
${CMD}
else
echo "Aucune commande n'a été lancée: Possibilité de le faire à la main. cf ${CMD}"
fi

240
bin2/foreign-domain.sh Executable file
View File

@@ -0,0 +1,240 @@
#!/bin/bash
# list / add / remove the domains external to kaz.bzh that are served by the proxy
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
export PRG="$0"
cd $(dirname $0)
. "${DOCKERS_ENV}"
# Use the "local" certificate tree in local mode, the live Let's Encrypt tree otherwise.
LETS_DIR="/etc/letsencrypt/$([ "${mode}" == "local" ] && echo "local" || echo "live")"
declare -a availableComposes availableOrga
# Services that can receive a foreign domain.
availableComposes=(${pahekoHost} ${cloudHost} ${dokuwikiHost} ${wordpressHost} ${matterHost} ${castopodHost})
# Orga names from the config list, comments and the "-orga" suffix stripped.
availableOrga=($(sed -e "s/\(.*\)[ \t]*#.*$/\1/" -e "s/^[ \t]*\(.*\)-orga$/\1/" -e "/^$/d" "${KAZ_CONF_DIR}/container-orga.list"))
availableProxyComposes=($(getList "${KAZ_CONF_DIR}/container-proxy.list"))
# no more export in .env — re-export every *domain= variable currently set
export $(set | grep "domain=")
export CMD=""
export SIMU=""
export CHANGE=""
# Print command-line help and exit with status 1.
usage(){
    echo "Usage: ${PRG} list [friend-domain...]"
    echo "       ${PRG} [-n] add orga [${pahekoHost} ${cloudHost} ${dokuwikiHost} ${wordpressHost} ${matterHost} ${castopodHost}] [friend-domain...] "
    echo "       ${PRG} [-n] del [friend-domain...]"
    echo "       ${PRG} -l"
    echo "  -l       short list"
    echo "  -renewAll"
    echo "  -h       help"
    echo "  -n       simulation"
    exit 1
}
export CERT_CFG="${KAZ_CONF_PROXY_DIR}/foreign-certificate"
# Obtain (or renew) a Let's Encrypt certificate for domain $1 via the
# certbot instance living inside the proxy container. Runs in a subshell
# so the local fileName does not leak.
createCert () {
    (
    fileName="${LETS_DIR}/$1-key.pem"
    # Freshness check disabled: always ask certbot, it de-duplicates itself.
    #[ -f "${fileName}" ] || return
    # if [ -f "${fileName}" ]; then
    #     fileTime=$(stat --format='%Y' "${fileName}")
    #     current_time=$(date +%s)
    #     if (( "${fileTime}" > ( "${current_time}" - ( 60 * 60 * 24 * 89 ) ) )); then
    #         exit
    #     fi
    # fi
    printKazMsg "create certificat for $1"
    ${SIMU} docker exec -i proxyServ bash -c "/opt/certbot/bin/certbot certonly -n --nginx -d $1"
    )
}
# Option / command parsing: flags may precede one of list|add|del,
# which becomes ${CMD}; remaining words are handled by that command.
for ARG in $@; do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            ;;
        '-n' )
            # Simulation mode: prefix every mutating command with echo.
            shift
            export SIMU="echo"
            ;;
        '-renewAll')
            # Re-run certbot for every foreign domain currently configured.
            for i in $("${KAZ_BIN_DIR}/foreign-domain.sh" -l); do
                echo "$i"
                createCert "$i" |grep failed
            done
            exit
            ;;
        '-l')
            # Short list: just the server_name values of each compose.
            for compose in ${availableComposes[@]} ; do
                grep "server_name" "${KAZ_CONF_PROXY_DIR}/${compose}_kaz_name" | sed -e "s/[ \t]*\([^#]*\)#.*/\1/g" -e "/^$/d" -e "s/.*server_name[ \t]\([^ ;]*\).*/\1/"
            done
            exit
            ;;
        'list'|'add'|'del' )
            shift
            CMD="${ARG}"
            break
            ;;
        * )
            usage
            ;;
    esac
done
if [ -z "${CMD}" ]; then
    echo "Commande missing"
    usage
fi
########################################
# True (0) when the candidate domain name is unusable: empty, or containing
# any character outside the allowed set [-.a-zA-Z0-9]. False (1) otherwise.
badDomaine () {
    local candidate="$1"
    if [[ -n "${candidate}" && "${candidate}" =~ ^[-.a-zA-Z0-9]*$ ]]; then
        return 1
    fi
    return 0
}
# True (0) when $1 is empty or is not one of the known orgas listed in the
# availableOrga global array; false (1) for a valid orga name.
badOrga () {
    local candidate="$1"
    [[ -n "${candidate}" ]] || return 0
    case " ${availableOrga[*]} " in
        *" ${candidate} "*) return 1 ;;
        *)                  return 0 ;;
    esac
}
# True (0) when $1 is empty or is not one of the service names listed in
# the availableComposes global array; false (1) for a valid service.
badCompose () {
    local candidate="$1"
    [[ -n "${candidate}" ]] || return 0
    case " ${availableComposes[*]} " in
        *" ${candidate} "*) return 1 ;;
        *)                  return 0 ;;
    esac
}
########################################
# Print every foreign domain declared in the *_kaz_name proxy files,
# one "domain : service" per line.
listServ () {
    for compose in ${availableComposes[@]} ; do
        sed -e "s/[ \t]*\([^#]*\)#.*/\1/g" -e "/^$/d" -e "s/.*server_name[ \t]\([^ ;]*\).*/\1 : ${compose}/" "${KAZ_CONF_PROXY_DIR}/${compose}_kaz_name"
    done
}
# Print every foreign-domain -> orga mapping declared in the *_kaz_map
# proxy files, one "domain => orga : service" per line.
listOrgaServ () {
    for compose in ${availableComposes[@]} ; do
        sed -e "s/[ \t]*\([^#]*\)#.*/\1/g" -e "/^$/d" -e "s/\([^ ]*\)[ \t]*\([^ \t;]*\).*/\1 => \2 : ${compose}/" "${KAZ_CONF_PROXY_DIR}/${compose}_kaz_map"
    done
}
########################################
# 'list' command: with no argument, dump all mappings and server names;
# with arguments, show only the entries matching each given domain.
list () {
    previousOrga=$(listOrgaServ)
    previousServ=$(listServ)
    if [ $# -lt 1 ]; then
        [ -n "${previousOrga}" ] && echo "${previousOrga}"
        [ -n "${previousServ}" ] && echo "${previousServ}"
        return
    fi
    for ARG in $@
    do
        orga=$(echo "${previousOrga}" | grep "${ARG}.* =>")
        serv=$(echo "${previousServ}" | grep "${ARG}.* =>")
        [ -n "${orga}" ] && echo "${orga}"
        [ -n "${serv}" ] && echo "${serv}"
    done
}
########################################
# 'add' command: attach one or more foreign domains to a service of an orga.
# Creates the TLS certificate, registers the domain in the proxy map/name
# files, and (for nextcloud) appends it to the cloud's trusted_domains.
add () {
    # $1 : orga
    # $2 : service
    # $3.. : friend-domain(s)
    [ $# -lt 3 ] && usage
    badOrga $1 && echo "bad orga: ${RED}$1${NC} not in ${GREEN}${availableOrga[@]}${NC}" && usage
    badCompose $2 && echo "bad compose: ${RED}$2${NC} not in ${GREEN}${availableComposes[@]}${NC}" && usage
    ORGA=$1
    COMPOSE=$2
    shift; shift
    CLOUD_SERVNAME="${ORGA}-${nextcloudServName}"
    CLOUD_CONFIG="${DOCK_VOL}/orga_${ORGA}-cloudConfig/_data/config.php"
    # XXX check compose exist in orga ?
    # /kaz/bin/kazList.sh service enable ${ORGA}
    # The cloud container must be up: we need to run occ inside it.
    if [ "${COMPOSE}" = "${cloudHost}" ]; then
        if ! [[ "$(docker ps -f name=${CLOUD_SERVNAME} | grep -w ${CLOUD_SERVNAME})" ]]; then
            printKazError "${CLOUD_SERVNAME} not running... abort"
            exit
        fi
    fi
    # Validate every domain before touching anything.
    for FRIEND in $@; do
        badDomaine "${FRIEND}" && echo "bad domaine: ${RED}${FRIEND}${NC}" && usage
    done
    for FRIEND in $@; do
        createCert "${FRIEND}"
        if [ "${COMPOSE}" = "${cloudHost}" ]; then
            # Next free index of the trusted_domains array in config.php.
            IDX=$(awk 'BEGIN {flag=0; cpt=0} /trusted_domains/ {flag=1} /)/ {if (flag) {print cpt+1; exit 0}} / => / {if (flag && cpt<$1) cpt=$1}' "${CLOUD_CONFIG}")
            ${SIMU} docker exec -ti -u 33 "${CLOUD_SERVNAME}" /var/www/html/occ config:system:set trusted_domains "${IDX}" --value="${FRIEND}"
        fi
        # Skip if already mapped; refuse if mapped to another orga/service.
        previousOrga=$(listOrgaServ | grep "${FRIEND}")
        [[ " ${previousOrga}" =~ " ${FRIEND} => ${ORGA} : ${COMPOSE}" ]] && echo " - already done" && continue
        [[ " ${previousOrga}" =~ " ${FRIEND} " ]] && echo " - ${YELLOW}${BOLD}$(echo "${previousOrga}" | grep -e "${FRIEND}")${NC} must be deleted before" && return
        if [[ -n "${SIMU}" ]] ; then
            echo "${FRIEND} ${ORGA}; => ${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_map"
            cat <<EOF
=> ${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_name
server_name ${FRIEND};
EOF
        else
            echo "${FRIEND} ${ORGA};" >> "${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_map"
            cat >> "${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_name" <<EOF
server_name ${FRIEND};
EOF
        fi
        echo "${PRG}: ${FRIEND} added"
        CHANGE="add"
    done
    #(cd "${KAZ_COMP_DIR}/${ORGA}-orga"; docker-compose restart)
}
########################################
# 'del' command: detach one or more foreign domains — removes them from the
# proxy map/name files and from the cloud's trusted_domains when relevant.
del () {
    [ $# -lt 1 ] && usage
    for FRIEND in $@; do
        badDomaine "${FRIEND}" && echo "bad domaine: ${RED}${FRIEND}${NC}" && usage
        previous=$(listOrgaServ | grep -e "${FRIEND}")
        [[ ! "${previous}" =~ ^${FRIEND} ]] && echo "${FRIEND} not found in ${previous}" && continue
        # XXX if done OK
        for COMPOSE in ${availableComposes[@]} ; do
            if grep -q -e "^[ \t]*${FRIEND}[ \t]" "${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_map" ; then
                if [ "${COMPOSE}" = "${cloudHost}" ]; then
                    # Recover the orga from the map, then drop the domain
                    # from its nextcloud trusted_domains config.
                    ORGA="$(grep "${FRIEND}" "${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_map" | sed "s/^${FRIEND}\s*\([^;]*\);/\1/")"
                    CLOUD_CONFIG="${DOCK_VOL}/orga_${ORGA}-cloudConfig/_data/config.php"
                    ${SIMU} sed -e "/\d*\s*=>\s*'${FRIEND}'/d" -i "${CLOUD_CONFIG}"
                fi
                ${SIMU} sed -e "/^[ \t]*${FRIEND}[ \t]/d" -i "${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_map"
            fi
            if grep -q -e "^[ \t]*server_name ${FRIEND};" "${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_name" ; then
                ${SIMU} sed -i "${KAZ_CONF_PROXY_DIR}/${COMPOSE}_kaz_name" \
                    -e "/^[ \t]*server_name ${FRIEND};/d"
            fi
        done
        echo "${PRG}: ${FRIEND} deleted"
        CHANGE="del"
    done
}
########################################
# Run the selected command (list/add/del) with the remaining arguments,
# then regenerate and reload the proxy configuration if anything changed.
${CMD} $@
if [ -n "${CHANGE}" ] ; then
    echo "Reload proxy conf"
    for item in "${availableProxyComposes[@]}"; do
        ${SIMU} ${KAZ_COMP_DIR}/${item}/proxy-gen.sh
        # NOTE(review): reload.sh is executed once per proxy item; it was
        # presumably meant to run once after the loop — confirm.
        ${SIMU} "${KAZ_COMP_DIR}/proxy/reload.sh"
    done
fi
########################################

659
bin2/gestContainers.sh Executable file
View File

@@ -0,0 +1,659 @@
#!/bin/bash
# Bulk container management script:
# init / versions / restart ...
#
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
. $KAZ_ROOT/secret/env-kaz
PRG=$(basename $0)
#######################################
# Everything below is configured in env_kaz (defaults shown for reference):
# NAS_VOL="/mnt/disk-nas1/docker/volumes/"
# OPERATE_ON_MAIN= # default NO: only orgas are processed
# OPERATE_ON_NAS_ORGA="OUI" # default yes: also process orgas hosted on the NAS
# OPERATE_LOCAL_ORGA="OUI" # default yes
# TEMPO_ACTION_STOP=2 # tempo restart: delay after the stop
# TEMPO_ACTION_START=60 # tempo restart: delay before reloading the proxy
# DEFAULTCONTAINERS="cloud agora wp wiki office paheko castopod spip"
# APPLIS_PAR_DEFAUT="tasks calendar contacts bookmarks mail richdocuments external drawio snappymail ransomware_protection" #rainloop richdocumentscode
# QUIET="1" # where echo output is redirected
###################################################################################################################
#GLOBAL VARS
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
AVAILABLE_ORGAS=${availableOrga[*]//-orga/}
availableContainersCommuns=( $(getList "${KAZ_CONF_DIR}/container-withMail.list") $(getList "${KAZ_CONF_DIR}/container-withoutMail.list"))
CONTAINERS_TYPES=
declare -A DockerServNames # container name for each service type
DockerServNames=( [cloud]="${nextcloudServName}" [agora]="${mattermostServName}" [wiki]="${dokuwikiServName}" [wp]="${wordpressServName}" [office]="${officeServName}" [paheko]="${pahekoServName}" [castopod]="${castopodServName}" [spip]="${spipServName}" )
declare -A FilterLsVolume # volume-name fragment identifying each container type
FilterLsVolume=( [cloud]="cloudMain" [agora]="matterConfig" [wiki]="wikiConf" [wp]="wordpress" [castopod]="castopodMedia" [spip]="spip")
declare -A composeDirs # compose directory name for the shared (non-orga) instance
composeDirs=( [cloud]="cloud" [agora]="mattermost" [wiki]="dokuwiki" [office]="collabora" [paheko]="paheko" [castopod]="castopod" [spip]="spip")
declare -A serviceNames # service name inside an orga's docker-compose file
serviceNames=( [cloud]="cloud" [agora]="agora" [wiki]="dokuwiki" [wp]="wordpress" [office]="collabora" [castopod]="castopod" [spip]="spip")
declare -A subScripts # per-type helper script, when one exists
subScripts=( [cloud]="manageCloud.sh" [agora]="manageAgora.sh" [wiki]="manageWiki.sh" [wp]="manageWp.sh" [castopod]="manageCastopod.sh" )
declare -A OrgasOnNAS
declare -A OrgasLocales
declare -A NbOrgas
declare -A RunningOrgas
declare -A Posts
OCCCOMANDS=()
MMCTLCOMANDS=()
EXECCOMANDS=()
# Print command-line help (French, user-facing — kept verbatim).
usage() {
echo "${PRG} [OPTION] [CONTAINERS_TYPES] [COMMANDES] [ORGAS]
Ce script regroupe l'ensemble des opérations que l'on souhaite automatiser sur plusieurs containers
Par defaut, sur les orgas, mais on peut aussi ajouter les communs
OPTIONS
-h|--help Cette aide :-)
-n|--simu SIMULATION
-q|--quiet On ne parle pas (utile avec le -n pour avoir que les commandes)
-m|--main Traite aussi le container commun (cloud commun / agora commun / wiki commun)
-M Ne traite que le container commun, et pas les orgas
--nas Ne traite QUE les orgas sur le NAS
--local Ne traite pas les orgas sur le NAS
-v|--version Donne la version des containers et signale les MàJ
-l|--list Liste des containers (up / down, local ou nas) de cette machine
CONTAINERS_TYPES
-cloud Pour agir sur les clouds
-agora Pour agir sur les agoras
-wp Les wp
-wiki Les wiki
-office Les collabora
-paheko Le paheko
-castopod Les castopod
-spip Les spip
COMMANDES (on peut en mettre plusieurs dans l'ordre souhaité)
-I|--install L'initialisation du container
-t Redémarre avec tempo (docker-compose down puis sleep ${TEMPO_ACTION_STOP} puis up puis sleep ${TEMPO_ACTION_START})
-r Redémarre sans tempo (docker restart)
-exec \"command\" Envoie une commande docker exec
--optim Lance la procédure Nextcloud pour optimiser les performances ** **
-occ \"command\" Envoie une commande via occ ** **
-u Mets à jour les applis ** SPECIFIQUES **
-i Install des applis ** CLOUD **
-a \"app1 app2 ...\" Choix des appli à installer ou mettre à jour (entre guillemets) ** **
-U|--upgrade Upgrade des clouds ** **
-mmctl \"command\" Envoie une commande via mmctl ** SPECIFIQUES **
-p|--post \"team\" \"message\" Poste un message dans une team agora ** AGORA **
ORGAS
[orga1 orga2 ... ] on peut filtrer parmi : ${AVAILABLE_ORGAS}
Exemples :
${PRG} -office -m -r restart de tous les collaboras (libére RAM)
${PRG} -cloud -u -r -q -n Affiche toutes les commandes (-n -q ) pour mettre à jour toutes les applis des clouds + restart (-u -r)
${PRG} -p \"monorga:town-square\" \"Hello\" monorga # envoie Hello sur le centreville de l'orga monorga sur son mattermost dédié
Valeurs par défaut :
Tempo de Stop : ${TEMPO_ACTION_STOP}
Tempo de restart : ${TEMPO_ACTION_START}
"
}
####################################################
################ fonctions clefs ###################
####################################################
_populate_lists(){
    # Build the per-type work lists: fills OrgasOnNAS / OrgasLocales /
    # NbOrgas / RunningOrgas for every requested container type.
    if [ -z "${CONTAINERS_TYPES}" ]; then
        # No type selected: operate on every default type at once.
        CONTAINERS_TYPES=${DEFAULTCONTAINERS}
    fi
    for TYPE in ${CONTAINERS_TYPES}; do
        if [ -n "${FilterLsVolume[$TYPE]}" ] ; then # volume-based discovery
            [ -n "$OPERATE_ON_NAS_ORGA" ] && OrgasOnNAS["$TYPE"]=$( _getListOrgas ${NAS_VOL} ${FilterLsVolume[$TYPE]} )
            [ -n "$OPERATE_LOCAL_ORGA" ] && OrgasLocales["$TYPE"]=$( _getListOrgas ${DOCK_VOL} ${FilterLsVolume[$TYPE]} "SANSLN")
        else # no volume marker: fall back to docker ps
            [ -n "$OPERATE_LOCAL_ORGA" ] && OrgasLocales["$TYPE"]=$(docker ps --format '{{.Names}}' | grep ${DockerServNames[$TYPE]} | sed -e "s/-*${DockerServNames[$TYPE]}//")
        fi
        NbOrgas["$TYPE"]=$(($(echo ${OrgasOnNAS["$TYPE"]} | wc -w) + $(echo ${OrgasLocales["$TYPE"]} | wc -w)))
        RunningOrgas["$TYPE"]=$(docker ps --format '{{.Names}}' | grep ${DockerServNames[$TYPE]} | sed -e "s/-*${DockerServNames[$TYPE]}//")
    done
}
_getListOrgas(){
    # Derive orga names from the docker volumes present under a directory,
    # intersected with the configured AVAILABLE_ORGAS.
    # $1 where to lookup
    # $2 filter (volume-name fragment)
    # $3 removeSymbolicLinks (when set, only keep real directories)
    [ ! -d $1 ] || [ -z "$2" ] && return 1 # skip when the directory does not exist
    LIST=$(ls "${1}" | grep -i orga | grep -i "$2" | sed -e "s/-${2}$//g" | sed -e 's/^orga_//')
    # With $3 set, 'ls -F' + grep '/' filters out symlinked volumes.
    [ -n "$3" ] && LIST=$(ls -F "${1}" | grep '/' | grep -i orga | grep -i "$2" | sed -e "s/-${2}\/$//g" | sed -e 's/^orga_//')
    LIST=$(comm -12 <(printf '%s\n' ${LIST} | sort) <(printf '%s\n' ${AVAILABLE_ORGAS} | sort))
    echo "$LIST"
}
_executeFunctionForAll(){
    # Iterate over the selected containers and run a callback for each.
    # Placeholder keys in $4 are substituted per container before eval:
    #   #ORGA# #DOCKERSERVNAME# #SURNAS# #ISMAIN# #TYPE# #COMPOSEDIR#
    # $1 function to eval
    # $2 display name of the operation
    # $3 container types to process
    # $4 parameter template passed to the function
    for TYPE in ${3}; do
        if [ -n "$OPERATE_ON_MAIN" ]; then
            # Shared instance first, when it exists on this host.
            if [[ -n "${composeDirs[$TYPE]}" && "${availableContainersCommuns[*]}" =~ "${composeDirs[$TYPE]}" ]]; then # no shared cloud / agora / wp / wiki on this host otherwise
                Dockername=${DockerServNames[$TYPE]}
                PARAMS=$(echo $4 | sed -e "s/#ORGA#//g;s/#DOCKERSERVNAME#/$Dockername/g;s/#ISMAIN#/OUI/g;s/#SURNAS#/NON/g;s/#TYPE#/$TYPE/g;s%#COMPOSEDIR#%${KAZ_COMP_DIR}/${composeDirs[$TYPE]}%g" )
                echo "-------- $2 $TYPE COMMUN ----------------------------" >& $QUIET
                eval "$1" $PARAMS
            fi
        fi
        if [[ ${NbOrgas[$TYPE]} -gt 0 ]]; then
            echo "-------- $2 des $TYPE des ORGAS ----------------------------" >& $QUIET
            COMPTEUR=1
            if [ -n "$OPERATE_LOCAL_ORGA" ]; then
                for ORGA in ${OrgasLocales[$TYPE]}; do
                    Dockername=${ORGA}-${DockerServNames[$TYPE]}
                    PARAMS=$(echo $4 | sed -e "s/#ORGA#/${ORGA}/g;s/#DOCKERSERVNAME#/$Dockername/g;s/#ISMAIN#/NON/g;s/#SURNAS#/NON/g;s/#TYPE#/$TYPE/g;s%#COMPOSEDIR#%${KAZ_COMP_DIR}/${ORGA}-orga%g" )
                    echo "${RED} ${ORGA}-orga ${NC}($COMPTEUR/${NbOrgas[$TYPE]})" >& $QUIET
                    eval "$1" $PARAMS
                    COMPTEUR=$((COMPTEUR + 1))
                done
            fi
            if [ -n "$OPERATE_ON_NAS_ORGA" ]; then
                for ORGA in ${OrgasOnNAS[$TYPE]}; do
                    Dockername=${ORGA}-${DockerServNames[$TYPE]}
                    PARAMS=$(echo $4 | sed -e "s/#ORGA#/${ORGA}/g;s/#DOCKERSERVNAME#/$Dockername/g;s/#ISMAIN#/NON/g;s/#SURNAS#/OUI/g;s/#TYPE#/$TYPE/g;s%#COMPOSEDIR#%${KAZ_COMP_DIR}/${ORGA}-orga%g" )
                    echo "${RED} ${ORGA}-orga ${NC}($COMPTEUR/${NbOrgas[$TYPE]})" >& $QUIET
                    eval "$1" $PARAMS
                    COMPTEUR=$((COMPTEUR + 1))
                done
            fi
        fi
    done
}
##############################################
################ COMMANDES ###################
##############################################
Init(){
    # Initialise the selected containers via their per-type helper script.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "Initialisation" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    _executeFunctionForAll "_initContainer" "Initialisation" "${CONTAINERS_TYPES[@]}" "#TYPE# #ISMAIN# #SURNAS# #ORGA# "
}
restart-compose() {
    # Restart every selected container through docker-compose with delays
    # (stop, sleep TEMPO_ACTION_STOP, up, sleep TEMPO_ACTION_START),
    # then reload the proxy.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "DOCKER-COMPOSE DOWN puis sleep ${TEMPO_ACTION_STOP}" >& $QUIET
    echo "DOCKER-COMPOSE UP puis sleep ${TEMPO_ACTION_START}" >& $QUIET
    echo "de ${CONTAINERS_TYPES} pour $NB_ORGAS_STR" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    _executeFunctionForAll "_restartContainerAvecTempo" "Restart" "${CONTAINERS_TYPES[@]}" "#TYPE# #ISMAIN# #COMPOSEDIR#"
    ${SIMU} sleep ${TEMPO_ACTION_START}
    _reloadProxy
    echo "--------------------------------------------------------" >& $QUIET
    echo "${GREEN}FIN${NC} " >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
}
restart() {
    # Restart every selected container with plain 'docker restart'
    # (no tempo), then reload the proxy.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "DOCKER RESTART des ${CONTAINERS_TYPES} pour $NB_ORGAS_STR" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    _executeFunctionForAll "_restartContainer" "Restart" "${CONTAINERS_TYPES[@]}" "#DOCKERSERVNAME#"
    _reloadProxy
    echo "--------------------------------------------------------" >& $QUIET
    echo "${GREEN}FIN${NC} " >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
}
version(){
    # Print the version of every selected container (delegated to the
    # per-type helper script through _versionContainer).
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "VERSIONS" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    _executeFunctionForAll "_versionContainer" "Version" "${CONTAINERS_TYPES[@]}" "#TYPE# #ISMAIN# #ORGA#"
}
listContainers(){
    # Summarise, per container type, which instances are up/down and
    # whether they live locally or on the NAS.
    echo "${NC}--------------------------------------------------------"
    echo "LISTES"
    echo "------------------------------------------------------------"
    for TYPE in ${CONTAINERS_TYPES}; do
        echo "****************** $TYPE ****************"
        _listContainer "$TYPE"
    done
}
######################## Fonctions génériques #######################
_initContainer(){
    # Initialise one container by delegating to its helper script
    # (manageCloud.sh --install, manageAgora.sh --install, ...),
    # forwarding the -nas / -q / -n flags as appropriate.
    # $1 type
    # $2 "OUI" when operating on the shared (COMMUN) instance
    # $3 "OUI" when the orga lives on the NAS
    # $4 orga name (ignored for the shared instance)
    if [ -n "${subScripts[$1]}" ] ; then
        evalStr="${KAZ_BIN_DIR}/${subScripts[$1]} --install"
        if [ "$3" = "OUI" ]; then evalStr="${evalStr} -nas" ; fi
        if [ ! "$QUIET" = "1" ]; then evalStr="${evalStr} -q" ; fi
        if [ -n "$SIMU" ]; then evalStr="${evalStr} -n" ; fi
        if [ ! "$2" = "OUI" ]; then evalStr="${evalStr} $4" ; fi
        eval $evalStr
    fi
}
_restartContainer(){
    # Plain 'docker restart' of one container.
    # $1 Dockername
    echo -n "${NC}Redemarrage ... " >& $QUIET
    # Bare ${SIMU}: no-op normally; prints an empty line in simulation
    # mode — presumably for readable simulation output, confirm.
    ${SIMU}
    ${SIMU} docker restart $1
    echo "${GREEN}OK${NC}" >& $QUIET
}
_restartContainerAvecTempo(){
    # docker-compose stop + sleep TEMPO_ACTION_STOP, then up -d + sleep
    # TEMPO_ACTION_START. The shared instance stops the whole compose;
    # an orga only stops/starts its own service.
    # $1 type
    # $2 "OUI" when operating on the shared (main) instance
    # $3 composeDir
    dir=$3
    if [ -z $dir ]; then return 1; fi # compose dir may not exist (e.g. shared wordpress)
    cd "$dir"
    echo -n "${NC}Arrêt ... " >& $QUIET
    # Bare ${SIMU}: blank line in simulation mode only.
    ${SIMU}
    if [ "$2" = "OUI" ]; then ${SIMU} docker-compose stop ;
    else ${SIMU} docker-compose stop "${serviceNames[$1]}"
    fi
    ${SIMU} sleep ${TEMPO_ACTION_STOP}
    echo "${GREEN}OK${NC}" >& $QUIET
    echo -n "${NC}Démarrage ... " >& $QUIET
    if [ "$2" = "OUI" ]; then ${SIMU} docker-compose up -d ;
    else ${SIMU} docker-compose up -d "${serviceNames[$1]}"
    fi
    ${SIMU} sleep ${TEMPO_ACTION_START}
    echo "${GREEN}OK${NC}" >& $QUIET
}
_reloadProxy() {
    # Reload the reverse proxy ("proxy" entry only) from the proxy compose list.
    availableProxyComposes=($(getList "${KAZ_CONF_DIR}/container-proxy.list"))
    for item in "${availableProxyComposes[@]}"; do
        [ "${item}" = "proxy" ] && ${SIMU} ${KAZ_COMP_DIR}/${item}/reload.sh
    done
}
_versionContainer() {
    # Print the version of one container via its helper script --version.
    # $1 type
    # $2 "OUI" when operating on the shared (COMMUN) instance
    # $3 orga name (ignored for the shared instance)
    if [ -n "${subScripts[$1]}" ] ; then
        evalStr="${KAZ_BIN_DIR}/${subScripts[$1]} --version"
        if [ ! "$2" = "OUI" ]; then evalStr="${evalStr} $3" ; fi
        eval $evalStr
    fi
}
_listContainer(){
    # For one type (cloud / agora / wiki / wp ...), print which instances
    # are up or down, split between local volumes and NAS volumes.
    # $1 type
    RUNNING_FROM_NAS=$(comm -12 <(printf '%s\n' ${OrgasOnNAS[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort) | sed -e ':a;N;$!ba;s/\n/ /g')
    RUNNING_LOCAL=$(comm -12 <(printf '%s\n' ${OrgasLocales[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort) | sed -e ':a;N;$!ba;s/\n/ /g')
    # comm -23 = set difference: configured minus running.
    DOWN_ON_NAS=$(comm -23 <(printf '%s\n' ${OrgasOnNAS[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort) | sed -e ':a;N;$!ba;s/\n/ /g')
    DOWN_LOCAL=$(comm -23 <(printf '%s\n' ${OrgasLocales[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort)| sed -e ':a;N;$!ba;s/\n/ /g')
    NB_SUR_NAS=$(echo ${OrgasOnNAS[$1]} | wc -w)
    NB_LOCAUX=$(echo ${OrgasLocales[$1]} | wc -w)
    NB_RUNNING_SUR_NAS=$(echo $RUNNING_FROM_NAS | wc -w)
    NB_RUNNING_LOCALLY=$(echo $RUNNING_LOCAL | wc -w)
    MAIN_RUNNING="${RED}DOWN${NC}"
    if docker ps | grep -q " ${DockerServNames[$1]}"
    then
        MAIN_RUNNING="${GREEN}UP${NC}"
    fi
    [ -n "${composeDirs[${1}]}" ] && echo "${NC}Le ${1} commun est $MAIN_RUNNING"
    if [[ ${NbOrgas[$1]} -gt 0 ]]; then
        ENLOCALSTR=
        if [[ ${NB_RUNNING_SUR_NAS[$1]} -gt 0 ]]; then ENLOCALSTR=" en local" ; fi
        echo "Orgas : $NB_RUNNING_LOCALLY / $NB_LOCAUX running ${1}$ENLOCALSTR"
        echo "${NC}UP : ${GREEN}${RUNNING_LOCAL}"
        echo "${NC}DOWN : ${RED}$DOWN_LOCAL${NC}"
        if [[ ${NB_RUNNING_SUR_NAS[$1]} -gt 0 ]]; then
            echo "${NC}Orgas : $NB_RUNNING_SUR_NAS / $NB_SUR_NAS running depuis le NAS :"
            echo "${NC}UP : ${GREEN}${RUNNING_FROM_NAS}"
            echo "${NC}DOWN : ${RED}$DOWN_ON_NAS${NC}"
        fi
    fi
}
#########################################################
############# FONCTIONS SPECIFIQUES #####################
#########################################################
##################################
############### CLOUD ############
##################################
# Run 'occ upgrade' on every selected cloud.
UpgradeClouds() {
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "UPGRADE des cloud" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    RunOCCCommand "upgrade"
}
# Run the Nextcloud DB optimisation commands on every selected cloud.
OptimiseClouds() {
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "Optimisation des cloud" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    RunOCCCommand "db:add-missing-indices" "db:convert-filecache-bigint --no-interaction"
}
InstallApps(){
    # Install Nextcloud applications (LISTE_APPS, or the defaults from
    # APPLIS_PAR_DEFAUT) on every selected cloud via manageCloud.sh -i.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "INSTALL DES APPLIS sur les clouds : ${LISTE_APPS}" >& $QUIET
    echo "-------------------------------------------------------------" >& $QUIET
    if [ -z "${LISTE_APPS}" ]; then
        echo "Aucune appli n'est précisée, j'installe les applis par défaut : ${APPLIS_PAR_DEFAUT}" >& $QUIET
        LISTE_APPS="${APPLIS_PAR_DEFAUT}"
    fi
    PARAMS="-a \"$LISTE_APPS\""
    if [ ! "$QUIET" = "1" ]; then PARAMS="${PARAMS} -q" ; fi
    if [ -n "$SIMU" ]; then PARAMS="${PARAMS} -n" ; fi
    _executeFunctionForAll "${KAZ_BIN_DIR}/${subScripts["cloud"]} -i $PARAMS" "Install des applis" "cloud" "#ORGA#"
}
UpdateApplis() {
    # Update Nextcloud applications (LISTE_APPS, or all apps when none
    # was given) on every selected cloud via manageCloud.sh -u.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "UPDATE DES APPLIS des cloud : ${LISTE_APPS}" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    PARAMS="-a ${LISTE_APPS}"
    if [ -z "${LISTE_APPS}" ]; then
        echo "Aucune appli n'est précisée, je les met toutes à jour! " >& $QUIET
        PARAMS=
    fi
    if [ ! "$QUIET" = "1" ]; then PARAMS="${PARAMS} -q" ; fi
    if [ -n "$SIMU" ]; then PARAMS="${PARAMS} -n" ; fi
    _executeFunctionForAll "${KAZ_BIN_DIR}/${subScripts["cloud"]} -u $PARAMS" "Maj des applis" "cloud" "#ORGA#"
}
##################################
############### AGORA ############
##################################
PostMessages(){
    # Post each queued message (Posts: "team:channel" -> message) to the
    # matching Mattermost instance via manageAgora.sh -p.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "Envoi de messages sur mattermost" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    for TEAM in "${!Posts[@]}"
    do
        # NOTE(review): ${var/\"/\\\"} escapes only the FIRST double quote;
        # a message with several quotes stays partly unescaped — confirm.
        MSG=${Posts[$TEAM]/\"/\\\"}
        PARAMS="-p \"$TEAM\" \"$MSG\""
        if [ ! "$QUIET" = "1" ]; then PARAMS="${PARAMS} -q" ; fi
        if [ -n "$SIMU" ]; then PARAMS="${PARAMS} -n" ; fi
        _executeFunctionForAll "${KAZ_BIN_DIR}/${subScripts["agora"]} $PARAMS" "Post vers $TEAM sur l'agora" "agora" "#ORGA#"
    done
}
########## LANCEMENT COMMANDES OCC / MMCTL ############
RunCommands() {
    # Dispatch a batch of raw commands to the matching runner.
    # $1     : command family, one of OCC / MMCTL / EXEC
    # $2..$n : the commands themselves, one per argument
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "Envoi de commandes en direct" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    local family="$1"
    shift
    local cmd
    for cmd in "$@"; do
        case "${family}" in
            OCC)   RunOCCCommand "${cmd}" ;;
            MMCTL) RunMMCTLCommand "${cmd}" ;;
            EXEC)  RunEXECCommand "${cmd}" ;;
        esac
    done
}
# Run one occ command inside a cloud container (as www-data, uid 33).
_runSingleOccCommand(){
    # $1 Command
    # $2 Dockername
    ${SIMU} docker exec -u 33 $2 /var/www/html/occ $1
}
# Run one mmctl command inside a Mattermost container.
_runSingleMmctlCommand(){
    # $1 Command
    # $2 Dockername
    ${SIMU} docker exec $2 bin/mmctl $1
}
# Run one arbitrary command inside a container.
_runSingleExecCommand(){
    # $1 Command
    # $2 Dockername
    ${SIMU} docker exec $2 $1
}
# Broadcast one occ command to every selected cloud container.
RunOCCCommand() {
    # $1 Command
    _executeFunctionForAll "_runSingleOccCommand \"${1}\"" "OCC $1" "cloud" "#DOCKERSERVNAME#"
}
# Broadcast one mmctl command to every selected agora container.
RunMMCTLCommand() {
    # $1 Command
    _executeFunctionForAll "_runSingleMmctlCommand \"${1}\"" "MMCTL $1" "agora" "#DOCKERSERVNAME#"
}
# Broadcast one docker-exec command to every selected container type.
RunEXECCommand() {
    # $1 Command
    _executeFunctionForAll "_runSingleExecCommand \"${1}\"" "docker exec $1" "${CONTAINERS_TYPES[@]}" "#DOCKERSERVNAME#"
}
########## Main #################
########## Main #################
# Argument parsing. Option values (-occ / -exec / -a / -mmctl / --post)
# consume the FOLLOWING argument(s) via the GET* state flags.
for ARG in "$@"; do
    if [ -n "${GETOCCCOMAND}" ]; then # after a -occ
        OCCCOMANDS+=("${ARG}")
        GETOCCCOMAND=
    elif [ -n "${GETEXECCOMAND}" ]; then # after a -exec
        EXECCOMANDS+=("${ARG}")
        GETEXECCOMAND=
    elif [ -n "${GETAPPS}" ]; then # after a -a
        LISTE_APPS="${LISTE_APPS} ${ARG}"
        GETAPPS=""
    elif [ -n "${GETMMCTLCOMAND}" ]; then # after a -mmctl
        MMCTLCOMANDS+=("${ARG}")
        GETMMCTLCOMAND=
    elif [ -n "${GETTEAM}" ]; then # after a --post
        GETMESSAGE="now"
        GETTEAM=""
        TEAM="${ARG}"
    elif [ -n "${GETMESSAGE}" ]; then # after a --post "team:channel"
        # Sanity checks: a short dash-prefixed team or message usually means
        # the user mixed up the option order.
        # (fixed: the patterns were quoted as "-*", which only matched the
        # literal two-character string; unquoted -* matches any dash prefix)
        if [[ $TEAM == -* && ${#TEAM} -le 5 ]]; then echo "J'envoie mon message à \"${TEAM}\" ?? Arf, ça me plait pas j'ai l'impression que tu t'es planté sur la commande."; usage ; exit 1 ; fi
        if [[ $ARG == -* && ${#ARG} -le 5 ]]; then echo "J'envoie le message \"${ARG}\" ?? Arf, ça me plait pas j'ai l'impression que tu t'es planté sur la commande."; usage ; exit 1 ; fi
        if [[ ! $TEAM =~ .*:.+ ]]; then echo "Il faut mettre un destinataire sous la forme team:channel. Recommence !"; usage ; exit 1 ; fi
        Posts+=( ["${TEAM}"]="$ARG" )
        GETMESSAGE=""
        TEAM=""
    else
        case "${ARG}" in
            '-h' | '--help' )
                usage && exit ;;
            '-n' | '--simu')
                SIMU="echo" ;;
            '-q' )
                QUIET="/dev/null" ;;
            '-m' | '--main' )
                OPERATE_ON_MAIN="OUI-OUI" ;;
            '-M' )
                AVAILABLE_ORGAS= && OPERATE_ON_MAIN="OUI-OUI" ;; # no orgas at all
            '--nas' | '-nas' )
                OPERATE_LOCAL_ORGA= ;; # skip local orgas
            '--local' | '-local' )
                OPERATE_ON_NAS_ORGA= ;; # skip NAS orgas
            '-cloud'|'--cloud')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} cloud" ;;
            '-agora'|'--agora'|'-mm'|'--mm'|'-matter'*|'--matter'*)
                CONTAINERS_TYPES="${CONTAINERS_TYPES} agora" ;;
            '-wiki'|'--wiki')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} wiki" ;;
            '-wp'|'--wp')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} wp" ;;
            '-office'|'--office'|'-collab'*|'--collab'*)
                CONTAINERS_TYPES="${CONTAINERS_TYPES} office" ;;
            '-paheko'|'--paheko')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} paheko" ;;
            '-pod'|'--pod'|'-castopod'|'--castopod')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} castopod" ;;
            '-spip')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} spip" ;;
            '-t' )
                COMMANDS="${COMMANDS} RESTART-COMPOSE" ;;
            '-r' )
                COMMANDS="${COMMANDS} RESTART-DOCKER" ;;
            '-l' | '--list' )
                COMMANDS="$(echo "${COMMANDS} LIST" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-v' | '--version')
                COMMANDS="$(echo "${COMMANDS} VERSION" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-I' | '--install' )
                COMMANDS="$(echo "${COMMANDS} INIT" | sed "s/\s/\n/g" | sort | uniq)" ;; # sed/sort/uniq avoids duplicates
            '-U' | '--upgrade')
                COMMANDS="$(echo "${COMMANDS} UPGRADE" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '--optim' )
                COMMANDS="$(echo "${COMMANDS} OPTIMISE-CLOUD" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-u' )
                COMMANDS="$(echo "${COMMANDS} UPDATE-CLOUD-APP" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-i' )
                COMMANDS="$(echo "${COMMANDS} INSTALL-CLOUD-APP" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-a' )
                GETAPPS="now" ;;
            '-occ' )
                COMMANDS="$(echo "${COMMANDS} RUN-CLOUD-OCC" | sed "s/\s/\n/g" | sort | uniq)"
                GETOCCCOMAND="now" ;;
            '-mmctl' )
                COMMANDS="$(echo "${COMMANDS} RUN-AGORA-MMCTL" | sed "s/\s/\n/g" | sort | uniq)"
                GETMMCTLCOMAND="now" ;;
            '-exec' )
                COMMANDS="$(echo "${COMMANDS} RUN-DOCKER-EXEC" | sed "s/\s/\n/g" | sort | uniq)"
                GETEXECCOMAND="now" ;;
            '-p' | '--post' )
                COMMANDS="$(echo "${COMMANDS} POST-AGORA" | sed "s/\s/\n/g" | sort | uniq)"
                GETTEAM="now" ;;
            -*) # unknown option: ignore
                # (fixed: was the quoted pattern '-*', which only matched the
                # literal argument "-*"; unknown options fell through to *)
                # and were silently treated as orga names)
                ;;
            *)
                GIVEN_ORGA="${GIVEN_ORGA} ${ARG%-orga}"
                ;;
        esac
    fi
done
# Post-parsing validation, list filtering, then command dispatch.
# NOTE(review): no case arm ever produces "RESTART-TYPE", so this exclusion
# check looks dead — confirm (the -r arm emits RESTART-DOCKER).
if [[ "${COMMANDS[*]}" =~ "RESTART-COMPOSE" && "${COMMANDS[*]}" =~ "RESTART-TYPE" ]]; then
    echo "Je restarte via docker-compose ou via docker mais pas les deux !"
    usage
    exit 1
fi
if [ -z "${COMMANDS}" ]; then
    usage && exit
fi
if [ -n "${GIVEN_ORGA}" ]; then
    # Intersection of the configured orgas and those given on the CLI.
    AVAILABLE_ORGAS=$(comm -12 <(printf '%s\n' ${AVAILABLE_ORGAS} | sort) <(printf '%s\n' ${GIVEN_ORGA} | sort))
fi
NB_ORGAS=$(echo "${AVAILABLE_ORGAS}" | wc -w )
if [[ $NB_ORGAS = 0 && -z "${OPERATE_ON_MAIN}" ]]; then
    echo "Aucune orga trouvée."
    exit 1
fi
NB_ORGAS_STR="$NB_ORGAS orgas"
[ -n "${OPERATE_ON_MAIN}" ] && NB_ORGAS_STR="$NB_ORGAS_STR + les communs"
_populate_lists # resolve the clouds / agoras / wikis / wps of the selected orgas
# Ask for confirmation before mass-initialising more than two orgas.
if [[ $NB_ORGAS -gt 2 && "${COMMANDS[*]}" =~ 'INIT' ]]; then
    ETLECLOUDCOMMUN=
    [ -n "${OPERATE_ON_MAIN}" ] && ETLECLOUDCOMMUN=" ainsi que les containers commun"
    echo "On s'apprête à initialiser les ${CONTAINERS_TYPES} suivants : ${AVAILABLE_ORGAS}${ETLECLOUDCOMMUN}"
    checkContinue
fi
# Execute the requested commands in the order they were normalised.
for COMMAND in ${COMMANDS}; do
    case "${COMMAND}" in
        'LIST' )
            listContainers && exit ;;
        'VERSION' )
            version && exit ;;
        'OPTIMISE-CLOUD' )
            OptimiseClouds ;;
        'RESTART-COMPOSE' )
            restart-compose ;;
        'RESTART-DOCKER' )
            restart ;;
        'UPDATE-CLOUD-APP' )
            UpdateApplis ;;
        'UPGRADE' )
            UpgradeClouds ;;
        'INIT' )
            Init ;;
        'INSTALL-CLOUD-APP' )
            InstallApps ;;
        'RUN-CLOUD-OCC' )
            RunCommands "OCC" "${OCCCOMANDS[@]}" ;;
        'RUN-AGORA-MMCTL' )
            RunCommands "MMCTL" "${MMCTLCOMANDS[@]}" ;;
        'RUN-DOCKER-EXEC' )
            RunCommands "EXEC" "${EXECCOMANDS[@]}" ;;
        'POST-AGORA' )
            # NOTE(review): ${Posts} expands to ${Posts[0]} (usually empty for
            # an associative array); PostMessages ignores its arguments and
            # iterates the Posts keys itself, so this is harmless — confirm.
            PostMessages ${Posts} ;;
    esac
done

659
bin2/gestContainers_v2.sh Executable file
View File

@@ -0,0 +1,659 @@
#!/bin/bash
# Mass-management script for KAZ containers:
# init / versions / restart ...
#
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
PRG=$(basename $0)
# All KAZ servers reachable for --all-srv (discovered via DNS helper).
tab_sites_destinations_possibles=($(get_Serveurs_Kaz))
# ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} "${CMD}"
# SITE_DST="${tab_sites_destinations_possibles[1]}"
# ${tab_sites_destinations_possibles[@]}
#GLOBAL VARS
NAS_VOL="/mnt/disk-nas1/docker/volumes/"
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
AVAILABLE_ORGAS=${availableOrga[*]//-orga/}
availableContainersCommuns=( $(getList "${KAZ_CONF_DIR}/container-withMail.list") $(getList "${KAZ_CONF_DIR}/container-withoutMail.list"))
OPERATE_ON_MAIN= # default NO: only orgas are processed, not the shared containers
OPERATE_ON_NAS_ORGA="OUI" # default yes: also process orgas hosted on the NAS
OPERATE_LOCAL_ORGA="OUI" # default yes: process locally hosted orgas
TEMPO_ACTION_STOP=2 # tempo restart: delay after the stop
TEMPO_ACTION_START=120 # tempo restart: delay before reloading the proxy
CONTAINERS_TYPES=
defaultContainersTypes="cloud agora wp wiki office paheko" # container types handled by this script
declare -A DockerServNames # docker container name for each type
DockerServNames=( [cloud]="${nextcloudServName}" [agora]="${mattermostServName}" [wiki]="${dokuwikiServName}" [wp]="${wordpressServName}" [office]="${officeServName}" [paheko]="${pahekoServName}" )
declare -A FilterLsVolume # volume-name pattern identifying which volume belongs to which type
FilterLsVolume=( [cloud]="cloudMain" [agora]="matterConfig" [wiki]="wikiConf" [wp]="wordpress" )
declare -A composeDirs # compose directory of the shared (commun) instance
composeDirs=( [cloud]="cloud" [agora]="mattermost" [wiki]="dokuwiki" [office]="collabora" [paheko]="paheko" )
declare -A serviceNames # service name inside an orga's compose file
serviceNames=( [cloud]="cloud" [agora]="agora" [wiki]="dokuwiki" [wp]="wordpress" [office]="collabora")
declare -A subScripts # per-type management sub-script (no entry => no sub-script)
subScripts=( [cloud]="manageCloud.sh" [agora]="manageAgora.sh" [wiki]="manageWiki.sh" [wp]="manageWp.sh" )
declare -A OrgasOnNAS
declare -A OrgasLocales
declare -A NbOrgas
declare -A RunningOrgas
declare -A Posts
QUIET="1" # echo redirection target: fd 1 (verbose) or /dev/null (-q)
OCCCOMANDS=()
MMCTLCOMANDS=()
EXECCOMANDS=()
# CLOUD
APPLIS_PAR_DEFAUT="tasks calendar contacts bookmarks richdocuments external drawio snappymail"
# Print the (French, user-facing) help text. The text below is a runtime
# string and is left untouched.
usage() {
echo "${PRG} [OPTION] [CONTAINERS_TYPES] [COMMANDES] [SERVEURS] [ORGAS]
Ce script regroupe l'ensemble des opérations que l'on souhaite automatiser sur plusieurs containers, sur un ou plusieurs sites.
Par defaut, sur les orgas, mais on peut aussi ajouter les communs
OPTIONS
-h|--help Cette aide :-)
-n|--simu SIMULATION
-q|--quiet On ne parle pas (utile avec le -n pour avoir que les commandes)
-m|--main Traite aussi le container commun (cloud commun / agora commun / wiki commun)
-M Ne traite que le container commun, et pas les orgas
--nas Ne traite QUE les orgas sur le NAS
--local Ne traite pas les orgas sur le NAS
-v|--version Donne la version des containers et signale les MàJ
-l|--list Liste des containers (up / down, local ou nas) de cette machine
CONTAINERS_TYPES
-cloud Pour agir sur les clouds
-agora Pour agir sur les agoras
-wp Les wp
-wiki Les wiki
-office Les collabora
COMMANDES (on peut en mettre plusieurs dans l'ordre souhaité)
-I|--install L'initialisation du container
-t Redémarre avec tempo (docker-compose down puis sleep ${TEMPO_ACTION_STOP} puis up puis sleep ${TEMPO_ACTION_START})
-r Redémarre sans tempo (docker restart)
-exec \"command\" Envoie une commande docker exec
--optim Lance la procédure Nextcloud pour optimiser les performances ** **
-occ \"command\" Envoie une commande via occ ** **
-u Mets à jour les applis ** SPECIFIQUES **
-i Install des applis ** CLOUD **
-a \"app1 app2 ...\" Choix des appli à installer ou mettre à jour (entre guillemets) ** **
-U|--upgrade Upgrade des clouds ** **
-mmctl \"command\" Envoie une commande via mmctl ** SPECIFIQUES **
-p|--post \"team\" \"message\" Poste un message dans une team agora ** AGORA **
SERVEURS
--all-srv Lance sur tous les serveurs ${tab_sites_destinations_possibles[@]}, sinon c'est uniquement sur ${site}
ORGAS sur ${site}
[orga1 orga2 ... ] on peut filtrer parmi : ${AVAILABLE_ORGAS}
Exemples :
${PRG} -office -m -r # restart de tous les collaboras (libére RAM)
${PRG} -cloud -u -r -q -n # affiche toutes les commandes (-n -q ) pour mettre à jour toutes les applis des clouds + restart (-u -r)
${PRG} -p \"monorga:town-square\" \"Hello\" monorga # envoie Hello sur le centreville de l'orga monorga sur son mattermost dédié
${PRG} -cloud -occ \"config:system:set default_phone_region --value='FR'\" --all-srv # modifie la variable default_phone_region dans le config.php de tous les clouds de tous les serveurs
"
}
####################################################
################# key functions ####################
####################################################
_populate_lists(){
# Build the per-type work lists of orgas to process.
# Fills the associative arrays OrgasOnNAS / OrgasLocales / NbOrgas /
# RunningOrgas, one entry per container type.
if [ -z "${CONTAINERS_TYPES}" ]; then
# no type filter given: process every known type at once
CONTAINERS_TYPES="$defaultContainersTypes"
fi
for TYPE in ${CONTAINERS_TYPES}; do
if [ -n "${FilterLsVolume[$TYPE]}" ] ; then # type has volumes: detect orgas from volume names
[ -n "$OPERATE_ON_NAS_ORGA" ] && OrgasOnNAS["$TYPE"]=$( _getListOrgas ${NAS_VOL} ${FilterLsVolume[$TYPE]} )
[ -n "$OPERATE_LOCAL_ORGA" ] && OrgasLocales["$TYPE"]=$( _getListOrgas ${DOCK_VOL} ${FilterLsVolume[$TYPE]} "SANSLN")
else # no volumes for this type: fall back to `docker ps`
[ -n "$OPERATE_LOCAL_ORGA" ] && OrgasLocales["$TYPE"]=$(docker ps --format '{{.Names}}' | grep ${DockerServNames[$TYPE]} | sed -e "s/-*${DockerServNames[$TYPE]}//")
fi
NbOrgas["$TYPE"]=$(($(echo ${OrgasOnNAS["$TYPE"]} | wc -w) + $(echo ${OrgasLocales["$TYPE"]} | wc -w)))
RunningOrgas["$TYPE"]=$(docker ps --format '{{.Names}}' | grep ${DockerServNames[$TYPE]} | sed -e "s/-*${DockerServNames[$TYPE]}//")
done
}
_getListOrgas(){
    # List orga names derived from the docker volumes found in a directory.
    # $1 directory to inspect (e.g. ${DOCK_VOL} or ${NAS_VOL})
    # $2 volume-name filter (e.g. "cloudMain"); mandatory
    # $3 non-empty => keep only real directories, skipping symbolic links
    # stdout: names intersected with AVAILABLE_ORGAS, one per line
    # returns 1 when the directory is missing or no filter was given
    # FIX: "$1" was unquoted in the -d test; LIST leaked into global scope.
    if [ ! -d "$1" ] || [ -z "$2" ]; then
        return 1 # nothing to inspect on this host
    fi
    local LIST
    LIST=$(ls "${1}" | grep -i orga | grep -i "$2" | sed -e "s/-${2}$//g" | sed -e 's/^orga_//')
    # "SANSLN" mode: `ls -F` marks directories with a trailing '/', links with '@'
    [ -n "$3" ] && LIST=$(ls -F "${1}" | grep '/' | grep -i orga | grep -i "$2" | sed -e "s/-${2}\/$//g" | sed -e 's/^orga_//')
    # keep only orgas also declared in container-orga.list
    LIST=$(comm -12 <(printf '%s\n' ${LIST} | sort) <(printf '%s\n' ${AVAILABLE_ORGAS} | sort))
    echo "$LIST"
}
_executeFunctionForAll(){
# Iterate over the selected containers and run one command on each.
# The parameter template ($4) is instantiated per target by substituting the
# placeholder keys #ORGA# #DOCKERSERVNAME# #SURNAS# #ISMAIN# #TYPE# #COMPOSEDIR#.
# $1 function (or command prefix) to eval
# $2 human-readable name of the operation (for banners)
# $3 container types to walk
# $4 parameter template handed to the command after substitution
for TYPE in ${3}; do
if [ -n "$OPERATE_ON_MAIN" ]; then
# shared (commun) instance first, when this host actually runs one
if [[ -n "${composeDirs[$TYPE]}" && "${availableContainersCommuns[*]}" =~ "${composeDirs[$TYPE]}" ]]; then # no shared cloud/agora/wp/wiki on this instance otherwise
Dockername=${DockerServNames[$TYPE]}
PARAMS=$(echo $4 | sed -e "s/#ORGA#//g;s/#DOCKERSERVNAME#/$Dockername/g;s/#ISMAIN#/OUI/g;s/#SURNAS#/NON/g;s/#TYPE#/$TYPE/g;s%#COMPOSEDIR#%${KAZ_COMP_DIR}/${composeDirs[$TYPE]}%g" )
echo "-------- $2 $TYPE COMMUN ----------------------------" >& $QUIET
eval "$1" $PARAMS
fi
fi
if [[ ${NbOrgas[$TYPE]} -gt 0 ]]; then
echo "-------- $2 des $TYPE des ORGAS ----------------------------" >& $QUIET
COMPTEUR=1
if [ -n "$OPERATE_LOCAL_ORGA" ]; then
# locally hosted orgas
for ORGA in ${OrgasLocales[$TYPE]}; do
Dockername=${ORGA}-${DockerServNames[$TYPE]}
PARAMS=$(echo $4 | sed -e "s/#ORGA#/${ORGA}/g;s/#DOCKERSERVNAME#/$Dockername/g;s/#ISMAIN#/NON/g;s/#SURNAS#/NON/g;s/#TYPE#/$TYPE/g;s%#COMPOSEDIR#%${KAZ_COMP_DIR}/${ORGA}-orga%g" )
echo "${RED} ${ORGA}-orga ${NC}($COMPTEUR/${NbOrgas[$TYPE]})" >& $QUIET
eval "$1" $PARAMS
COMPTEUR=$((COMPTEUR + 1))
done
fi
if [ -n "$OPERATE_ON_NAS_ORGA" ]; then
# orgas whose volumes live on the NAS
for ORGA in ${OrgasOnNAS[$TYPE]}; do
Dockername=${ORGA}-${DockerServNames[$TYPE]}
PARAMS=$(echo $4 | sed -e "s/#ORGA#/${ORGA}/g;s/#DOCKERSERVNAME#/$Dockername/g;s/#ISMAIN#/NON/g;s/#SURNAS#/OUI/g;s/#TYPE#/$TYPE/g;s%#COMPOSEDIR#%${KAZ_COMP_DIR}/${ORGA}-orga%g" )
echo "${RED} ${ORGA}-orga ${NC}($COMPTEUR/${NbOrgas[$TYPE]})" >& $QUIET
eval "$1" $PARAMS
COMPTEUR=$((COMPTEUR + 1))
done
fi
fi
done
}
##############################################
################# COMMANDS ###################
##############################################
Init(){
    # Initialise every selected container type by delegating to the per-type
    # management sub-script through _executeFunctionForAll/_initContainer.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "Initialisation" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    _executeFunctionForAll "_initContainer" "Initialisation" "${CONTAINERS_TYPES[@]}" "#TYPE# #ISMAIN# #SURNAS# #ORGA# "
}
restart-compose() {
# Walk the selected containers and restart them the "slow" way:
# docker-compose stop, settle TEMPO_ACTION_STOP s, up -d, settle
# TEMPO_ACTION_START s, then reload the reverse proxies.
echo "${NC}--------------------------------------------------------" >& $QUIET
echo "DOCKER-COMPOSE DOWN puis sleep ${TEMPO_ACTION_STOP}" >& $QUIET
echo "DOCKER-COMPOSE UP puis sleep ${TEMPO_ACTION_START}" >& $QUIET
echo "de ${CONTAINERS_TYPES} pour $NB_ORGAS_STR" >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
_executeFunctionForAll "_restartContainerAvecTempo" "Restart" "${CONTAINERS_TYPES[@]}" "#TYPE# #ISMAIN# #COMPOSEDIR#"
${SIMU} sleep ${TEMPO_ACTION_START}
_reloadProxy
echo "--------------------------------------------------------" >& $QUIET
echo "${GREEN}FIN${NC} " >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
}
restart() {
# Walk the selected containers and restart them the "fast" way
# (docker restart, no tempo), then reload the reverse proxies.
echo "${NC}--------------------------------------------------------" >& $QUIET
echo "DOCKER RESTART des ${CONTAINERS_TYPES} pour $NB_ORGAS_STR" >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
_executeFunctionForAll "_restartContainer" "Restart" "${CONTAINERS_TYPES[@]}" "#DOCKERSERVNAME#"
_reloadProxy
echo "--------------------------------------------------------" >& $QUIET
echo "${GREEN}FIN${NC} " >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
}
version(){
# Walk the selected containers and print their versions
# (delegated to the per-type sub-script via _versionContainer).
echo "${NC}--------------------------------------------------------" >& $QUIET
echo "VERSIONS" >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
_executeFunctionForAll "_versionContainer" "Version" "${CONTAINERS_TYPES[@]}" "#TYPE# #ISMAIN# #ORGA#"
}
listContainers(){
    # For every selected container type, print which instances are up/down
    # and whether they are hosted locally or on the NAS (via _listContainer).
    echo "${NC}--------------------------------------------------------"
    echo "LISTES"
    echo "------------------------------------------------------------"
    local kind
    for kind in ${CONTAINERS_TYPES}; do
        echo "****************** $kind ****************"
        _listContainer "$kind"
    done
}
######################## generic helpers #######################
_initContainer(){
# Initialise one container by invoking its management sub-script with
# --install, forwarding the quiet/simulation flags of this script.
# $1 container type (key into subScripts)
# $2 "OUI" when targeting the shared (commun) instance
# $3 "OUI" when the orga's volumes live on the NAS
# $4 orga name (only passed for per-orga instances)
if [ -n "${subScripts[$1]}" ] ; then
evalStr="${KAZ_BIN_DIR}/${subScripts[$1]} --install"
if [ "$3" = "OUI" ]; then evalStr="${evalStr} -nas" ; fi
if [ ! "$QUIET" = "1" ]; then evalStr="${evalStr} -q" ; fi
if [ -n "$SIMU" ]; then evalStr="${evalStr} -n" ; fi
if [ ! "$2" = "OUI" ]; then evalStr="${evalStr} $4" ; fi
eval $evalStr
fi
}
_restartContainer(){
# Restart one container in place.
# $1 docker container name
echo -n "${NC}Redemarrage ... " >& $QUIET
${SIMU}
${SIMU} docker restart $1
echo "${GREEN}OK${NC}" >& $QUIET
}
_restartContainerAvecTempo(){
    # Stop then start one compose stack (or a single service of it), with
    # settling delays between the steps.
    # $1 container type (key into serviceNames)
    # $2 "OUI" when operating on the shared (commun) compose -> whole stack
    # $3 compose directory (empty when this type has no such compose)
    # FIX: the header documented "$2" twice (the compose dir is $3), the
    # emptiness test used an unquoted $dir, and a failing `cd` would have run
    # docker-compose in whatever directory we happened to be in.
    local dir=$3
    if [ -z "$dir" ]; then return 1; fi # compose does not exist (e.g. shared wordpress)
    cd "$dir" || return 1
    echo -n "${NC}Arrêt ... " >& $QUIET
    ${SIMU}
    if [ "$2" = "OUI" ]; then ${SIMU} docker-compose stop ;
    else ${SIMU} docker-compose stop "${serviceNames[$1]}"
    fi
    ${SIMU} sleep ${TEMPO_ACTION_STOP}
    echo "${GREEN}OK${NC}" >& $QUIET
    echo -n "${NC}Démarrage ... " >& $QUIET
    if [ "$2" = "OUI" ]; then ${SIMU} docker-compose up -d ;
    else ${SIMU} docker-compose up -d "${serviceNames[$1]}"
    fi
    ${SIMU} sleep ${TEMPO_ACTION_START}
    echo "${GREEN}OK${NC}" >& $QUIET
}
_reloadProxy() {
    # Ask every configured reverse-proxy compose to reload its configuration.
    availableProxyComposes=($(getList "${KAZ_CONF_DIR}/container-proxy.list"))
    local proxyName
    for proxyName in "${availableProxyComposes[@]}"; do
        ${SIMU} ${KAZ_COMP_DIR}/${proxyName}/reload.sh
    done
}
_versionContainer() {
    # Print the version of one container by delegating to its sub-script.
    # $1 container type (key into subScripts)
    # $2 "OUI" when targeting the shared (commun) instance
    # $3 orga name (appended only for per-orga instances)
    local helper="${subScripts[$1]}"
    [ -z "${helper}" ] && return 0   # type without a sub-script: nothing to do
    local cmdLine="${KAZ_BIN_DIR}/${helper} --version"
    if [ ! "$2" = "OUI" ]; then
        cmdLine="${cmdLine} $3"
    fi
    eval $cmdLine
}
_listContainer(){
# For one container type (cloud / agora / wiki / wp), print a summary of
# which instances are up or down, hosted locally or on the NAS.
# $1 type
# running-and-on-NAS = intersection of the two lists (flattened to one line)
RUNNING_FROM_NAS=$(comm -12 <(printf '%s\n' ${OrgasOnNAS[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort) | sed -e ':a;N;$!ba;s/\n/ /g')
RUNNING_LOCAL=$(comm -12 <(printf '%s\n' ${OrgasLocales[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort) | sed -e ':a;N;$!ba;s/\n/ /g')
# list exclusion: comm -23 keeps entries only present in the first list
DOWN_ON_NAS=$(comm -23 <(printf '%s\n' ${OrgasOnNAS[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort) | sed -e ':a;N;$!ba;s/\n/ /g')
DOWN_LOCAL=$(comm -23 <(printf '%s\n' ${OrgasLocales[$1]} | sort) <(printf '%s\n' ${RunningOrgas[$1]} | sort)| sed -e ':a;N;$!ba;s/\n/ /g')
NB_SUR_NAS=$(echo ${OrgasOnNAS[$1]} | wc -w)
NB_LOCAUX=$(echo ${OrgasLocales[$1]} | wc -w)
NB_RUNNING_SUR_NAS=$(echo $RUNNING_FROM_NAS | wc -w)
NB_RUNNING_LOCALLY=$(echo $RUNNING_LOCAL | wc -w)
MAIN_RUNNING="${RED}DOWN${NC}"
if docker ps | grep -q " ${DockerServNames[$1]}"
then
MAIN_RUNNING="${GREEN}UP${NC}"
fi
[ -n "${composeDirs[${1}]}" ] && echo "${NC}Le ${1} commun est $MAIN_RUNNING"
if [[ ${NbOrgas[$1]} -gt 0 ]]; then
ENLOCALSTR=
# NOTE(review): NB_RUNNING_SUR_NAS is a scalar; indexing it with [$1] relies
# on bash arithmetic coercion of the string key to 0 — looks accidental,
# and using the NAS counter to decide the " en local" suffix is suspicious.
# Confirm intent before changing.
if [[ ${NB_RUNNING_SUR_NAS[$1]} -gt 0 ]]; then ENLOCALSTR=" en local" ; fi
echo "Orgas : $NB_RUNNING_LOCALLY / $NB_LOCAUX running ${1}$ENLOCALSTR"
echo "${NC}UP : ${GREEN}${RUNNING_LOCAL}"
echo "${NC}DOWN : ${RED}$DOWN_LOCAL${NC}"
if [[ ${NB_RUNNING_SUR_NAS[$1]} -gt 0 ]]; then
echo "${NC}Orgas : $NB_RUNNING_SUR_NAS / $NB_SUR_NAS running depuis le NAS :"
echo "${NC}UP : ${GREEN}${RUNNING_FROM_NAS}"
echo "${NC}DOWN : ${RED}$DOWN_ON_NAS${NC}"
fi
fi
}
#########################################################
############## TYPE-SPECIFIC FUNCTIONS ##################
#########################################################
##################################
############### CLOUD ############
##################################
UpgradeClouds() {
# Run the Nextcloud `occ upgrade` on every selected cloud.
echo "${NC}--------------------------------------------------------" >& $QUIET
echo "UPGRADE des cloud" >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
RunOCCCommand "upgrade"
}
OptimiseClouds() {
    # Run the standard Nextcloud performance-optimisation occ commands on
    # every selected cloud (missing DB indices + bigint filecache conversion).
    # FIX: previously called "RunOCCCommands", a function that is defined
    # nowhere in this script ("command not found" at runtime); route each
    # command through the existing RunOCCCommand instead.
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "Optimisation des cloud" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    RunOCCCommand "db:add-missing-indices"
    RunOCCCommand "db:convert-filecache-bigint --no-interaction"
}
InstallApps(){
# Install Nextcloud applications (LISTE_APPS, or the default set when none
# was given with -a) on every selected cloud via manageCloud.sh -i.
echo "${NC}--------------------------------------------------------" >& $QUIET
echo "INSTALL DES APPLIS sur les clouds : ${LISTE_APPS}" >& $QUIET
echo "-------------------------------------------------------------" >& $QUIET
if [ -z "${LISTE_APPS}" ]; then
echo "Aucune appli n'est précisée, j'installe les applis par défaut : ${APPLIS_PAR_DEFAUT}" >& $QUIET
LISTE_APPS="${APPLIS_PAR_DEFAUT}"
fi
PARAMS="-a \"$LISTE_APPS\""
if [ ! "$QUIET" = "1" ]; then PARAMS="${PARAMS} -q" ; fi
if [ -n "$SIMU" ]; then PARAMS="${PARAMS} -n" ; fi
_executeFunctionForAll "${KAZ_BIN_DIR}/${subScripts["cloud"]} -i $PARAMS" "Install des applis" "cloud" "#ORGA#"
}
UpdateApplis() {
# Update Nextcloud applications (LISTE_APPS, or every installed app when
# none was given with -a) on every selected cloud via manageCloud.sh -u.
echo "${NC}--------------------------------------------------------" >& $QUIET
echo "UPDATE DES APPLIS des cloud : ${LISTE_APPS}" >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
PARAMS="-a ${LISTE_APPS}"
if [ -z "${LISTE_APPS}" ]; then
echo "Aucune appli n'est précisée, je les met toutes à jour! " >& $QUIET
PARAMS=
fi
if [ ! "$QUIET" = "1" ]; then PARAMS="${PARAMS} -q" ; fi
if [ -n "$SIMU" ]; then PARAMS="${PARAMS} -n" ; fi
_executeFunctionForAll "${KAZ_BIN_DIR}/${subScripts["cloud"]} -u $PARAMS" "Maj des applis" "cloud" "#ORGA#"
}
##################################
############### AGORA ############
##################################
PostMessages(){
# Post each queued message (Posts["team:channel"]="message") to its
# Mattermost destination via manageAgora.sh -p.
echo "${NC}--------------------------------------------------------" >& $QUIET
echo "Envoi de messages sur mattermost" >& $QUIET
echo "--------------------------------------------------------" >& $QUIET
for TEAM in "${!Posts[@]}"
do
# NOTE(review): ${var/…} replaces only the FIRST double quote in the
# message; a message containing several quotes will still break the
# quoting of PARAMS below — confirm and use ${var//…} if so.
MSG=${Posts[$TEAM]/\"/\\\"}
PARAMS="-p \"$TEAM\" \"$MSG\""
if [ ! "$QUIET" = "1" ]; then PARAMS="${PARAMS} -q" ; fi
if [ -n "$SIMU" ]; then PARAMS="${PARAMS} -n" ; fi
_executeFunctionForAll "${KAZ_BIN_DIR}/${subScripts["agora"]} $PARAMS" "Post vers $TEAM sur l'agora" "agora" "#ORGA#"
done
}
########## DIRECT OCC / MMCTL / EXEC COMMAND DISPATCH ############
RunCommands() {
    # Forward a list of raw commands to the matching runner.
    # $1  backend selector: OCC, MMCTL or EXEC
    # $2… the commands themselves, one argument each
    echo "${NC}--------------------------------------------------------" >& $QUIET
    echo "Envoi de commandes en direct" >& $QUIET
    echo "--------------------------------------------------------" >& $QUIET
    local oneCommand
    for oneCommand in "${@:2}"; do
        case "$1" in
            "OCC")   RunOCCCommand "$oneCommand" ;;
            "MMCTL") RunMMCTLCommand "$oneCommand" ;;
            "EXEC")  RunEXECCommand "$oneCommand" ;;
        esac
    done
}
_runSingleOccCommand(){
# Run one Nextcloud occ command inside a cloud container (as www-data, uid 33).
# $1 occ command (word-split on purpose: may carry its own arguments)
# $2 docker container name
${SIMU} docker exec -u 33 $2 /var/www/html/occ $1
}
_runSingleMmctlCommand(){
# Run one Mattermost mmctl command inside an agora container.
# $1 mmctl command (word-split on purpose: may carry its own arguments)
# $2 docker container name
${SIMU} docker exec $2 bin/mmctl $1
}
_runSingleExecCommand(){
# Run one arbitrary command inside a container, through ssh to the target
# site (port 2201).
# $1 command to execute
# $2 docker container name
# NOTE(review): SITE_DST is only referenced in a commented-out example at the
# top of this script; when unset, the ssh target is ".${domain}" — confirm
# where SITE_DST is supposed to be assigned (presumably by --all-srv logic).
${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} docker exec $2 $1
}
RunOCCCommand() {
# Run one occ command ($1) on every selected cloud container.
_executeFunctionForAll "_runSingleOccCommand \"${1}\"" "OCC $1" "cloud" "#DOCKERSERVNAME#"
}
RunMMCTLCommand() {
# Run one mmctl command ($1) on every selected agora container.
_executeFunctionForAll "_runSingleMmctlCommand \"${1}\"" "MMCTL $1" "agora" "#DOCKERSERVNAME#"
}
RunEXECCommand() {
# Run one arbitrary docker-exec command ($1) on every selected container type.
_executeFunctionForAll "_runSingleExecCommand \"${1}\"" "docker exec $1" "${CONTAINERS_TYPES[@]}" "#DOCKERSERVNAME#"
}
########## Sanity check #################
for ARG in "$@"; do
    # Only PROD1 may drive all the other KAZ servers; any other server may
    # only operate on itself.
    # FIX: replaced the deprecated/ambiguous `[ … -a … ]` with `[[ … && … ]]`.
    if [[ "${ARG}" == "--all-srv" && "${site}" != "prod1" ]]; then
        echo "${RED}--all-srv choisi alors qu'on n'est pas sur prod1 : impossible, on quitte${NC}"
        # temporarily allowed for testing purposes
        # exit
    fi
done
########## Main #################
# Argument parser. Options that take a value set a GET* latch so the NEXT
# argument is consumed as that value.
# FIXES:
#  - the case arm `'-*' )` was a literal two-character pattern, so unknown
#    options were never ignored and fell through into the orga list;
#    the glob must be unquoted: `-* )`.
#  - same bug in the --post sanity checks: `[[ $X == "-*" ]]` compares
#    against the literal string "-*"; the intended test is the glob `-*`.
for ARG in "$@"; do
    #echo "${ARG}"
    if [ -n "${GETOCCCOMAND}" ]; then # right after a -occ
        OCCCOMANDS+=("${ARG}")
        GETOCCCOMAND=
    elif [ -n "${GETEXECCOMAND}" ]; then # right after a -exec
        EXECCOMANDS+=("${ARG}")
        GETEXECCOMAND=
    elif [ -n "${GETAPPS}" ]; then # right after a -a
        LISTE_APPS="${LISTE_APPS} ${ARG}"
        GETAPPS=""
    elif [ -n "${GETMMCTLCOMAND}" ]; then # right after a -mmctl
        MMCTLCOMANDS+=("${ARG}")
        GETMMCTLCOMAND=
    elif [ -n "${GETTEAM}" ]; then # right after a --post
        GETMESSAGE="now"
        GETTEAM=""
        TEAM="${ARG}"
    elif [ -n "${GETMESSAGE}" ]; then # right after a --post "team:channel"
        # guard against a forgotten value: a short option-looking token here
        # almost certainly means the user mistyped the command line
        if [[ $TEAM == -* && ${#TEAM} -le 5 ]]; then echo "J'envoie mon message à \"${TEAM}\" ?? Arf, ça me plait pas j'ai l'impression que tu t'es planté sur la commande."; usage ; exit 1 ; fi
        if [[ $ARG == -* && ${#ARG} -le 5 ]]; then echo "J'envoie le message \"${ARG}\" ?? Arf, ça me plait pas j'ai l'impression que tu t'es planté sur la commande."; usage ; exit 1 ; fi
        if [[ ! $TEAM =~ .*:.+ ]]; then echo "Il faut mettre un destinataire sous la forme team:channel. Recommence !"; usage ; exit 1 ; fi
        Posts+=( ["${TEAM}"]="$ARG" )
        GETMESSAGE=""
        TEAM=""
    else
        case "${ARG}" in
            '-h' | '--help' )
                usage && exit ;;
            '-n' | '--simu')
                SIMU="echo" ;;
            '-q' )
                QUIET="/dev/null" ;;
            '-m' | '--main' )
                OPERATE_ON_MAIN="OUI-OUI" ;;
            '-M' )
                AVAILABLE_ORGAS= && OPERATE_ON_MAIN="OUI-OUI" ;; # no orgas at all
            '--nas' | '-nas' )
                OPERATE_LOCAL_ORGA= ;; # skip locally hosted orgas
            '--local' | '-local' )
                OPERATE_ON_NAS_ORGA= ;; # skip NAS-hosted orgas
            '-cloud'|'--cloud')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} cloud" ;;
            '-agora'|'--agora')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} agora" ;;
            '-wiki'|'--wiki')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} wiki" ;;
            '-wp'|'--wp')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} wp" ;;
            '-office'|'--office')
                CONTAINERS_TYPES="${CONTAINERS_TYPES} office" ;;
            '-t' )
                COMMANDS="${COMMANDS} RESTART-COMPOSE" ;;
            '-r' )
                COMMANDS="${COMMANDS} RESTART-DOCKER" ;;
            '-l' | '--list' )
                COMMANDS="$(echo "${COMMANDS} LIST" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-v' | '--version')
                COMMANDS="$(echo "${COMMANDS} VERSION" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-I' | '--install' )
                COMMANDS="$(echo "${COMMANDS} INIT" | sed "s/\s/\n/g" | sort | uniq)" ;; # sed/sort/uniq avoids duplicate commands
            '-U' | '--upgrade')
                COMMANDS="$(echo "${COMMANDS} UPGRADE" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '--optim' )
                COMMANDS="$(echo "${COMMANDS} OPTIMISE-CLOUD" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-u' )
                COMMANDS="$(echo "${COMMANDS} UPDATE-CLOUD-APP" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-i' )
                COMMANDS="$(echo "${COMMANDS} INSTALL-CLOUD-APP" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-a' )
                GETAPPS="now" ;;
            '-occ' )
                COMMANDS="$(echo "${COMMANDS} RUN-CLOUD-OCC" | sed "s/\s/\n/g" | sort | uniq)"
                GETOCCCOMAND="now" ;;
            '-mmctl' )
                COMMANDS="$(echo "${COMMANDS} RUN-AGORA-MMCTL" | sed "s/\s/\n/g" | sort | uniq)"
                GETMMCTLCOMAND="now" ;;
            '-exec' )
                COMMANDS="$(echo "${COMMANDS} RUN-DOCKER-EXEC" | sed "s/\s/\n/g" | sort | uniq)"
                GETEXECCOMAND="now" ;;
            '-p' | '--post' )
                COMMANDS="$(echo "${COMMANDS} POST-AGORA" | sed "s/\s/\n/g" | sort | uniq)"
                GETTEAM="now" ;;
            -* ) # unknown option: ignore
                ;;
            *) # anything else is an orga name (with or without the -orga suffix)
                GIVEN_ORGA="${GIVEN_ORGA} ${ARG%-orga}"
                ;;
        esac
    fi
done
# Refuse the contradictory combination: -t (compose restart) and -r (plain
# docker restart) must not be requested in the same run.
# FIX: the guard used to look for "RESTART-TYPE", a token the option parser
# never produces; the real token is "RESTART-DOCKER", so the check never fired.
if [[ "${COMMANDS[*]}" =~ "RESTART-COMPOSE" && "${COMMANDS[*]}" =~ "RESTART-DOCKER" ]]; then
    echo "Je restarte via docker-compose ou via docker mais pas les deux !"
    usage
    exit 1
fi
# No command requested at all: print the help and leave.
if [ -z "${COMMANDS}" ]; then
    usage && exit
fi
# Keep only the orgas that were both requested on the command line and exist.
if [ -n "${GIVEN_ORGA}" ]; then
    # intersection of the two lists
    AVAILABLE_ORGAS=$(comm -12 <(printf '%s\n' ${AVAILABLE_ORGAS} | sort) <(printf '%s\n' ${GIVEN_ORGA} | sort))
fi
NB_ORGAS=$(echo "${AVAILABLE_ORGAS}" | wc -w )
if [[ $NB_ORGAS = 0 && -z "${OPERATE_ON_MAIN}" ]]; then
    echo "Aucune orga trouvée."
    exit 1
fi
NB_ORGAS_STR="$NB_ORGAS orgas"
[ -n "${OPERATE_ON_MAIN}" ] && NB_ORGAS_STR="$NB_ORGAS_STR + les communs"
_populate_lists # resolve the clouds / agoras / wikis / wps matching the orgas
# Initialising many targets at once is unusual: ask for confirmation first.
if [[ $NB_ORGAS -gt 2 && "${COMMANDS[*]}" =~ 'INIT' ]]; then
    ETLECLOUDCOMMUN=
    [ -n "${OPERATE_ON_MAIN}" ] && ETLECLOUDCOMMUN=" ainsi que les containers commun"
    echo "On s'apprête à initialiser les ${CONTAINERS_TYPES} suivants : ${AVAILABLE_ORGAS}${ETLECLOUDCOMMUN}"
    checkContinue
fi
# Execute every collected command (COMMANDS is kept sorted and de-duplicated).
for COMMAND in ${COMMANDS}; do
    case "${COMMAND}" in
        'LIST' )
            listContainers && exit ;;
        'VERSION' )
            version && exit ;;
        'OPTIMISE-CLOUD' )
            OptimiseClouds ;;
        'RESTART-COMPOSE' )
            restart-compose ;;
        'RESTART-DOCKER' )
            restart ;;
        'UPDATE-CLOUD-APP' )
            UpdateApplis ;;
        'UPGRADE' )
            UpgradeClouds ;;
        'INIT' )
            Init ;;
        'INSTALL-CLOUD-APP' )
            InstallApps ;;
        'RUN-CLOUD-OCC' )
            RunCommands "OCC" "${OCCCOMANDS[@]}" ;;
        'RUN-AGORA-MMCTL' )
            RunCommands "MMCTL" "${MMCTLCOMANDS[@]}" ;;
        'RUN-DOCKER-EXEC' )
            RunCommands "EXEC" "${EXECCOMANDS[@]}" ;;
        'POST-AGORA' )
            PostMessages ${Posts} ;; # NOTE(review): Posts is associative, ${Posts} expands empty; PostMessages iterates the array itself
    esac
done

1188
bin2/gestUsers.sh Executable file

File diff suppressed because it is too large Load Diff

18
bin2/getX509Certificates.sh Executable file
View File

@@ -0,0 +1,18 @@
#!/bin/bash
# FIX: shebang was "#/bin/bash" (missing '!'), turning it into a plain comment.
#what: extract the traefik (letsencrypt) certificates as x509 PEM files for the
#      mail and mailing-list services
#who: fanch
#when: 18/04/2025
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
certificates="mail listes"
# traefik's ACME store; certificate and key are base64-encoded in this JSON
acmeJson="/var/lib/docker/volumes/traefik_letsencrypt/_data/acme.json"
for i in ${certificates}; do
    jq -r ".letsencrypt.Certificates[] | select(.domain.main==\"${i}.${domain}\") | .certificate" "${acmeJson}" | base64 -d > /etc/ssl/certs/${i}.pem
    jq -r ".letsencrypt.Certificates[] | select(.domain.main==\"${i}.${domain}\") | .key" "${acmeJson}" | base64 -d > /etc/ssl/private/${i}.key
    # private keys must not be world-readable
    chmod 600 /etc/ssl/private/${i}.key
done

219
bin2/init.sh Executable file
View File

@@ -0,0 +1,219 @@
#!/bin/bash
# Interactive first-time configuration of dockers.env and the container lists.
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
cd "${KAZ_ROOT}"
# First two IPv4 addresses of this host (loopback excluded by head -2/-3 offsets).
MY_MAIN_IP=$(ip a | grep "inet " | head -2 | tail -1 | sed "s%.*inet *\([0-9.]*\)/.*%\1%")
MY_SECOND_IP=$(ip a | grep "inet " | head -3 | tail -1 | sed "s%.*inet *\([0-9.]*\)/.*%\1%")
# Defaults offered by the interactive prompts below.
DOMAIN="kaz.local"
DOMAIN_SYMPA="kaz.local"
HTTP_PROTO="https"
MAIN_IP="${MY_MAIN_IP}"
# FIX: was SYMPA_IP="MY_SECOND_IP" (literal string, missing the '$'),
# so the lists IP default was the text "MY_SECOND_IP" instead of the address.
SYMPA_IP="${MY_SECOND_IP}"
RESTART_POLICY="no"
JIRAFEAU_DIR="/var/jirafeauData/$(apg -n 1 -m 16 -M NCL)/"
DOCKERS_TMPL_ENV="${KAZ_CONF_DIR}/dockers.tmpl.env"
RESET_ENV="true"
# If a dockers.env already exists, load its current values as the new
# defaults and ask whether it should be reconfigured at all.
if [ -f "${DOCKERS_ENV}" ]; then
DOMAIN=$(getValInFile "${DOCKERS_ENV}" "domain")
DOMAIN_SYMPA=$(getValInFile "${DOCKERS_ENV}" "domain_sympa")
HTTP_PROTO=$(getValInFile "${DOCKERS_ENV}" "httpProto")
MAIN_IP=$(getValInFile "${DOCKERS_ENV}" "MAIN_IP")
SYMPA_IP=$(getValInFile "${DOCKERS_ENV}" "SYMPA_IP")
RESTART_POLICY=$(getValInFile "${DOCKERS_ENV}" "restartPolicy")
JIRAFEAU_DIR=$(getValInFile "${DOCKERS_ENV}" "jirafeauDir")
while : ; do
read -p "Change '${DOCKERS_ENV}'? " resetEnv
case "${resetEnv}" in
[yYoO]* )
break
;;
""|[Nn]* )
# empty answer or "n": keep the existing configuration untouched
RESET_ENV=""
break
;;
* )
echo "Please answer yes no."
;;
esac
done
fi
# Interactive reconfiguration: prompt for every tunable, then write the
# answers into dockers.env (created from the template on first run).
# FIXES: the https/http question rejected bad input with a copy-pasted
# "Please answer joe, emacs, vim or no." message; the Jirafeau prompt
# claimed a {1,3} length limit while the regex enforces {1,16}.
[ -n "${RESET_ENV}" ] && {
    echo "Reset '${DOCKERS_ENV}'"
    read -p " * domain (kaz.bzh / dev.kaz.bzh / kaz.local)? [${YELLOW}${DOMAIN}${NC}] " domain
    case "${domain}" in
        "" )
            DOMAIN="${DOMAIN}"
            ;;
        * )
            # keep only characters valid in a domain name: .-0-9a-z
            DOMAIN=$(sed 's/[^a-z0-9.-]//g' <<< "${domain}")
            ;;
    esac
    read -p " * lists domain (kaz.bzh / kaz2.ovh / kaz.local)? [${YELLOW}${DOMAIN_SYMPA}${NC}] " domain
    case "${domain}" in
        "" )
            DOMAIN_SYMPA="${DOMAIN_SYMPA}"
            ;;
        * )
            DOMAIN_SYMPA="${domain}"
            ;;
    esac
    while : ; do
        read -p " * protocol (https / http)? [${YELLOW}${HTTP_PROTO}${NC}] " proto
        case "${proto}" in
            "" )
                HTTP_PROTO="${HTTP_PROTO}"
                break
                ;;
            "https"|"http" )
                HTTP_PROTO="${proto}"
                break
                ;;
            * ) echo "Please answer https or http."
                ;;
        esac
    done
    while : ; do
        read -p " * main IP (ip)? [${YELLOW}${MAIN_IP}${NC}] " ip
        case "${ip}" in
            "" )
                MAIN_IP="${MAIN_IP}"
                break
                ;;
            * )
                if testValidIp "${ip}" ; then
                    MAIN_IP="${ip}"
                    break
                else
                    echo "Please answer x.x.x.x format."
                fi
                ;;
        esac
    done
    while : ; do
        read -p " * lists IP (ip)? [${YELLOW}${SYMPA_IP}${NC}] " ip
        case "${ip}" in
            "" )
                SYMPA_IP="${SYMPA_IP}"
                break
                ;;
            * )
                if testValidIp "${ip}" ; then
                    SYMPA_IP="${ip}"
                    break
                else
                    echo "Please answer x.x.x.x format."
                fi
                ;;
        esac
    done
    while : ; do
        read -p " * restart policy (always / unless-stopped / no)? [${YELLOW}${RESTART_POLICY}${NC}] " policy
        case "${policy}" in
            "" )
                RESTART_POLICY="${RESTART_POLICY}"
                break
                ;;
            "always"|"unless-stopped"|"no")
                RESTART_POLICY="${policy}"
                break
                ;;
            * ) echo "Please answer always, unless-stopped or no."
                ;;
        esac
    done
    while : ; do
        read -p " * Jirafeau dir? [${YELLOW}${JIRAFEAU_DIR}${NC}] " jirafeauDir
        case "${jirafeauDir}" in
            "" )
                JIRAFEAU_DIR="${JIRAFEAU_DIR}"
                break
                ;;
            * )
                if [[ "${jirafeauDir}" =~ ^/var/jirafeauData/[0-9A-Za-z]{1,16}/$ ]]; then
                    JIRAFEAU_DIR="${jirafeauDir}"
                    break
                else
                    echo "Please give dir name (/var/jirafeauData/[0-9A-Za-z]{1,16}/)."
                fi
                ;;
        esac
    done
    # Create dockers.env from the template if needed, then patch the values in place.
    [ -f "${DOCKERS_ENV}" ] || cp "${DOCKERS_TMPL_ENV}" "${DOCKERS_ENV}"
    sed -i "${DOCKERS_ENV}" \
        -e "s%^\s*domain\s*=.*$%domain=${DOMAIN}%" \
        -e "s%^\s*domain_sympa\s*=.*$%domain_sympa=${DOMAIN_SYMPA}%" \
        -e "s%^\s*httpProto\s*=.*$%httpProto=${HTTP_PROTO}%" \
        -e "s%^\s*MAIN_IP\s*=.*$%MAIN_IP=${MAIN_IP}%" \
        -e "s%^\s*SYMPA_IP\s*=.*$%SYMPA_IP=${SYMPA_IP}%" \
        -e "s%^\s*restartPolicy\s*=.*$%restartPolicy=${RESTART_POLICY}%" \
        -e "s%^\s*ldapRoot\s*=.*$%ldapRoot=dc=${DOMAIN_SYMPA/\./,dc=}%" \
        -e "s%^\s*jirafeauDir\s*=.*$%jirafeauDir=${JIRAFEAU_DIR}%"
}
# Seed the container list files with sensible defaults when they do not
# exist yet. The heredoc payloads are written verbatim to the config files.
if [ ! -f "${KAZ_CONF_DIR}/container-mail.list" ]; then
cat > "${KAZ_CONF_DIR}/container-mail.list" <<EOF
# e-mail server composer
postfix
ldap
#sympa
EOF
fi
if [ ! -f "${KAZ_CONF_DIR}/container-orga.list" ]; then
cat > "${KAZ_CONF_DIR}/container-orga.list" <<EOF
# orga composer
EOF
fi
if [ ! -f "${KAZ_CONF_DIR}/container-proxy.list" ]; then
cat > "${KAZ_CONF_DIR}/container-proxy.list" <<EOF
proxy
EOF
fi
if [ ! -f "${KAZ_CONF_DIR}/container-withMail.list" ]; then
cat > "${KAZ_CONF_DIR}/container-withMail.list" <<EOF
web
etherpad
roundcube
framadate
paheko
dokuwiki
gitea
mattermost
cloud
EOF
fi
if [ ! -f "${KAZ_CONF_DIR}/container-withoutMail.list" ]; then
cat > "${KAZ_CONF_DIR}/container-withoutMail.list" <<EOF
jirafeau
ethercalc
collabora
#vigilo
#grav
EOF
fi
# First run only: materialise the secret store from its template and
# generate the secrets and database users.
if [ ! -d "${KAZ_ROOT}/secret" ]; then
rsync -a "${KAZ_ROOT}/secret.tmpl/" "${KAZ_ROOT}/secret/"
"${KAZ_BIN_DIR}/secretGen.sh"
"${KAZ_BIN_DIR}/createDBUsers.sh"
fi

151
bin2/install.sh Executable file
View File

@@ -0,0 +1,151 @@
#!/bin/bash
# FIX: the file started with "g#!/bin/bash" — the stray 'g' broke the shebang
# (and would run the first line as the command `g#!/bin/bash`).
set -e
# dockers.env could be sourced here later to handle the
# DEV, PROD or LOCAL environments
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
export VAGRANT_SRC_DIR=/vagrant/files
cd "${KAZ_ROOT}"
# Refuse to run without the base configuration files.
if [ ! -f "${KAZ_ROOT}/config/dockers.env" ]; then
    printKazError "dockers.env not found"
    exit 1
fi
for type in mail orga proxy withMail withoutMail ; do
    if [ ! -f "${KAZ_ROOT}/config/container-${type}.list" ]; then
        printKazError "container-${type}.list not found"
        exit 1
    fi
done
# Everything below logs into a timestamped pair of stdout/stderr files.
mkdir -p "${KAZ_ROOT}/log/"
export DebugLog="${KAZ_ROOT}/log/log-install-$(date +%y-%m-%d-%T)-"
# Main installation body. Runs in a subshell whose stdout/stderr are tee'd
# to the ${DebugLog}* files (see the process substitutions on the last line).
(
declare -a DOCKERS_LIST NEW_SERVICE
# dockers to start (still missing: sympa, wordpress, orga)
DOCKERS_LIST+=($(getList "${KAZ_CONF_DIR}/container-withoutMail.list"))
DOCKERS_LIST+=($(getList "${KAZ_CONF_DIR}/container-proxy.list"))
DOCKERS_LIST+=($(getList "${KAZ_CONF_DIR}/container-mail.list"))
DOCKERS_LIST+=($(getList "${KAZ_CONF_DIR}/container-withMail.list"))
# web proxy postfix sympa roundcube jirafeau ldap quotas cachet ethercalc etherpad framadate paheko dokuwiki gitea mattermost cloud collabora
# 8080 443 8081 8082 8083 8084 8085 8086 8087 8088 8089 8090 8091 8092 8093 8094
# optionally restrict to a subset of services (level number or explicit names)
if [ $# -ne 0 ]; then
case $1 in
-h*|--h*)
echo $(basename "$0") " [-h] [-help] ([1-9]* | {service...})"
echo " -h"
echo " -help Display this help."
echo " service.. service to enable"
echo " [1-9]* level of predefined services set selection"
exit
;;
0)
echo $(basename "$0"): " level '0' not defined"
exit
;;
[0-9]*)
# numeric argument: enable the cumulative predefined sets 1..N
for level in $(seq 1 $1); do
case ${level} in
1) NEW_SERVICE+=("web" "proxy");;
2) NEW_SERVICE+=("postfix");;
3) NEW_SERVICE+=("roundcube");;
4) NEW_SERVICE+=("sympa");;
5) NEW_SERVICE+=("jirafeau");;
6) NEW_SERVICE+=("ldap");;
7) NEW_SERVICE+=("quotas");;
8) NEW_SERVICE+=("cachet");;
9) NEW_SERVICE+=("ethercalc");;
10) NEW_SERVICE+=("etherpad");;
11) NEW_SERVICE+=("framadate");;
12) NEW_SERVICE+=("paheko");;
13) NEW_SERVICE+=("dokuwiki");;
14) NEW_SERVICE+=("gitea");;
15) NEW_SERVICE+=("mattermost");;
16) NEW_SERVICE+=("collabora");;
17) NEW_SERVICE+=("cloud");;
*)
echo $(basename "$0"): " level '${level}' not defined"
exit
;;
esac
done
DOCKERS_LIST=(${NEW_SERVICE[@]})
printKazMsg "level $1"
;;
*)
# XXX name expansion is missing ("jir" would start jirafeau but not its download/first steps)
DOCKERS_LIST=($*)
;;
esac
fi
DOCKERS_LIST=($(filterAvailableComposes ${DOCKERS_LIST[*]}))
printKazMsg "dockers: ${DOCKERS_LIST[*]}"
# pre-download what Vagrant originally provided (jirafeau...)
mkdir -p "${KAZ_ROOT}/git" "${KAZ_ROOT}/download"
for DOCKER in ${DOCKERS_LIST[@]}; do
if [ -f "${KAZ_ROOT}/dockers/${DOCKER}/download.sh" ]; then
cd "${KAZ_ROOT}/dockers/${DOCKER}"
./download.sh
fi
done
# pre-install the spam/abuse filter ("depollueur") when a mail-ish service is selected
if [[ " ${DOCKERS_LIST[*]} " =~ " "(jirafeau|postfix|sympa)" " ]]; then
"${KAZ_BIN_DIR}/installDepollueur.sh"
docker volume create filterConfig
fi
# save the pre-downloads for the next Vagrant run
[ -d "${VAGRANT_SRC_DIR}/kaz/download" ] &&
rsync -a "${KAZ_ROOT}/download/" "${VAGRANT_SRC_DIR}/kaz/download/"
[ -d "${VAGRANT_SRC_DIR}/kaz/git" ] &&
rsync -a "${KAZ_ROOT}/git/" "${VAGRANT_SRC_DIR}/kaz/git/"
# build the dockers that ship a build script (etherpad, framadate, jirafeau...)
for DOCKER in ${DOCKERS_LIST[@]}; do
if [ -f "${KAZ_ROOT}/dockers/${DOCKER}/build.sh" ]; then
cd "${KAZ_ROOT}/dockers/${DOCKER}"
./build.sh
fi
done
# start only the listed containers (in one go, to keep the proxy consistent)
# "${KAZ_ROOT}/bin/container.sh" stop ${DOCKERS_LIST[*]}
"${KAZ_ROOT}/bin/container.sh" start ${DOCKERS_LIST[*]}
if [[ " ${DOCKERS_LIST[*]} " =~ " traefik " ]]; then
# traefik needs its own initialisation :-(
${KAZ_COMP_DIR}/traefik/first.sh
# start traefik (no longer launched by container.sh)
docker-compose -f ${KAZ_COMP_DIR}/traefik/docker-compose.yml up -d
fi
if [[ " ${DOCKERS_LIST[*]} " =~ " etherpad " ]]; then
# workaround for the pad's slow startup :-(
sleep 5
"${KAZ_ROOT}/bin/container.sh" start etherpad
fi
if [[ " ${DOCKERS_LIST[*]} " =~ " jirafeau " ]]; then
# workaround for jirafeau's slow startup :-(
(cd "${KAZ_COMP_DIR}/jirafeau" ; docker-compose restart)
fi
# run the dockers' first-time initialisation scripts (etherpad, framadate, jirafeau...)
for DOCKER in ${DOCKERS_LIST[@]}; do
if [ -f "${KAZ_ROOT}/dockers/${DOCKER}/first.sh" ]; then
cd "${KAZ_ROOT}/dockers/${DOCKER}"
./first.sh
fi
done
echo "########## ********** End install $(date +%D-%T)"
) > >(tee ${DebugLog}stdout.log) 2> >(tee ${DebugLog}stderr.log >&2)

27
bin2/installDepollueur.sh Executable file
View File

@@ -0,0 +1,27 @@
#!/bin/bash
# Install or update the "depollueur" mail-filtering tool from the KAZ forge:
# clones/updates the repository under ${KAZ_GIT_DIR}, builds it with make,
# then records the served mail domain for the filter.
SRC_DEP=https://git.kaz.bzh/KAZ/depollueur.git
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# A "test-running" marker means a development copy is in use: do not touch it.
if [[ -f "${KAZ_GIT_DIR}/depollueur/test-running" ]]; then
    exit
fi
printKazMsg "\n *** Installation du dépollueur"
# Build dependencies (C++ sources using boost, libcurl and openssl).
sudo apt-get install -y --fix-missing build-essential make g++ libboost-program-options-dev libboost-system-dev libboost-filesystem-dev libcurl4-gnutls-dev libssl-dev
mkdir -p "${KAZ_GIT_DIR}"
cd "${KAZ_GIT_DIR}"
if [ ! -d "depollueur" ]; then
    git clone "${SRC_DEP}"
fi
cd depollueur
# Discard any local modification, then update and rebuild.
git reset --hard && git pull
make
# DOCKERS_ENV is presumably exported by setKazVars — TODO confirm.
. "${DOCKERS_ENV}"
# The filter reads the mail domain it serves from this file.
echo "${domain}" > "src/bash/domainname"

182
bin2/interoPaheko.sh Executable file
View File

@@ -0,0 +1,182 @@
#!/bin/bash
# interoPaheko.sh: query Paheko (accounting/membership) through its SQL API
# and generate the createUser.txt file consumed by the account-creation chain.
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
. $KAZ_KEY_DIR/env-paheko
. $KAZ_KEY_DIR/env-kaz
# Paheko API endpoint with HTTP basic-auth credentials embedded in the URL.
URL_PAHEKO="$httpProto://${API_USER}:${API_PASSWORD}@kaz-paheko.$(echo $domain)"
PRG=$(basename $0)
RACINE=$(echo $PRG | awk '{print $1}')
# Temp files: raw SQL answer, then one extracted member record at a time.
TFILE_INT_PAHEKO_ACTION=$(mktemp /tmp/XXXXXXXX_INT_PAHEKO_ACTION.json)
TFILE_INT_PAHEKO_IDFILE=$(mktemp /tmp/XXXXXXXX_TFILE_INT_PAHEKO_IDFILE.json)
# Output: one ";"-separated line per account to create.
FILE_CREATEUSER="$KAZ_ROOT/tmp/createUser.txt"
sep=' '
#trap "rm -f ${TFILE_INT_PAHEKO_IDFILE} ${TFILE_INT_PAHEKO_ACTION} " 0 1 2 3 15
############################################ Fonctions #######################################################
# Header written at the top of FILE_CREATEUSER (documents its format).
TEXTE="
# -- fichier de création des comptes KAZ
# --
# -- 1 ligne par compte
# -- champs séparés par ;. les espaces en début et en fin sont enlevés
# -- laisser vide si pas de donnée
# -- pas d'espace dans les variables
# --
# -- ORGA: nom de l'organisation (max 15 car), vide sinon
# -- ADMIN_ORGA: O/N indique si le user est admin de l'orga (va le créer comme admin du NC de l'orga et admin de l'équipe agora)
# -- NC_ORGA: O/N indique si l'orga a demandé un NC
# -- PAHEKO_ORGA: O/N indique si l'orga a demandé un paheko
# -- WP_ORGA: O/N indique si l'orga a demandé un wp
# -- AGORA_ORGA: O/N indique si l'orga a demandé un mattermost
# -- WIKI_ORGA: O/N indique si l'orga a demandé un wiki
# -- NC_BASE: O/N indique si le user doit être inscrit dans le NC de base
# -- GROUPE_NC_BASE: soit null soit le groupe dans le NC de base
# -- EQUIPE_AGORA: soit null soit equipe agora (max 15 car)
# -- QUOTA=(1/10/20/...) en GB
# --
# NOM ; PRENOM ; EMAIL_SOUHAITE ; EMAIL_SECOURS ; ORGA ; ADMIN_ORGA ; NC_ORGA ; PAHEKO_ORGA ; WP_ORGA ; AGORA_ORGA ; WIKI_ORGA ; NC_BASE ; GROUPE_NC_BASE ; EQUIPE_AGORA ; QUOTA
#
# exemple pour un compte découverte:
# dupont ; jean-louis; jean-louis.dupont@kaz.bzh ; gregomondo@kaz.bzh; ; N; N; N; N; N; N; O; ; ;1
#
# exemple pour un compte asso de l'orga gogol avec le service dédié NC uniquement + une équipe dans l'agora
# dupont ; jean-louis; jean-louis.dupont@kaz.bzh ; gregomondo@kaz.bzh; gogol ; O; O; N; N; N; N;N;;gogol_team; 10
"
ExpMail() {
    # Send a minimal mail through msmtp.
    #   $1: recipient address, $2: subject, $3: body.
    local MAIL_DEST=$1
    local MAIL_SUJET=$2
    local MAIL_TEXTE=$3
    # Fix: pass subject/body as printf ARGUMENTS, never as the format string —
    # a '%' or backslash sequence in the text must not be interpreted.
    # The recipient is quoted so spaces cannot split it into several words.
    printf 'Subject:%s\n%s' "${MAIL_SUJET}" "${MAIL_TEXTE}" | msmtp "${MAIL_DEST}"
}
Int_paheko_Action() {
    # $1 est une action;
    # Query Paheko for every member whose action_auto field equals $1 and
    # rewrite ${FILE_CREATEUSER} with one account line per member found.
    # $2 = "silence" suppresses console progress output.
    # Exits 2 when nothing matches or when a record is malformed.
    ACTION=$1
    OPTION=$2
    # on envoie la requête sur le serveur paheko avec la clause à créer
    curl -s ${URL_PAHEKO}/api/sql -d "SELECT * from users where action_auto='${ACTION}';" >>${TFILE_INT_PAHEKO_ACTION}
    [ ! -z ${TFILE_INT_PAHEKO_ACTION} ] || { echo "probleme de fichier ${TFILE_INT_PAHEKO_ACTION}" ; exit 1;}
    # All member ids returned by the query (empty when nothing matches).
    REP_ID=$(jq -c '.results[].id ' ${TFILE_INT_PAHEKO_ACTION} 2>/dev/null)
    if [ ! -z "${REP_ID}" ]
    then
        [ "$OPTION" = "silence" ] || echo -e "${RED}Nombre de compte ${ACTION} ${NC}= ${GREEN} $(echo ${REP_ID} | wc -w) ${NC}"
        # Keep a timestamped backup of any previously generated file.
        if [ -f "$FILE_CREATEUSER" ]
        then
            mv $FILE_CREATEUSER $FILE_CREATEUSER.$(date +%d-%m-%Y-%H:%M:%S)
        fi
        echo "# -------- Fichier généré le $(date +%d-%m-%Y-%H:%M:%S) ----------">${FILE_CREATEUSER}
        echo "${TEXTE}" >>${FILE_CREATEUSER}
        for VAL_ID in ${REP_ID}
        do
            # Extract this member's record, then import each listed JSON field
            # as a shell variable of the same name (eval over jq output).
            jq -c --argjson val "${VAL_ID}" '.results[] | select (.id == $val)' ${TFILE_INT_PAHEKO_ACTION} > ${TFILE_INT_PAHEKO_IDFILE}
            for VAL_GAR in id_category action_auto nom email email_secours quota_disque admin_orga nom_orga responsable_organisation responsable_email agora cloud wordpress garradin docuwiki id_service
            do
                eval $VAL_GAR=$(jq .$VAL_GAR ${TFILE_INT_PAHEKO_IDFILE})
            done
            ################################
            # test du mail valide en $domain
            echo ${email} | grep -i "${domain}" || { echo "Erreur : le mail ${email} n'est pas en ${domain}"; exit ;}
            ################################
            #comme tout va bien on continue
            #on compte le nom de champs dans la zone nom pour gérer les noms et prénoms composés
            # si il y a 3 champs, on associe les 2 premieres valeurs avec un - et on laisse le 3ème identique
            # si il y a 4 champs on associe les 1 et le 2 avec un tiret et le 3 et 4 avec un tiret
            # on met les champs nom_ok et prenom_ok à blanc
            nom_ok=""
            prenom_ok=""
            # on regarde si le nom de l' orga est renseigné ou si le nom de l' orga est null et l' activité de membre est 7 (membre rattaché)
            # si c' est le cas alors le nom est le nom de l' orga et le prénom est forcé à la valeur Organisation
            if [[ "$nom_orga" = null ]] || [[ "$nom_orga" != null && "$id_service" = "7" ]]
            then
                [ "$OPTION" = "silence" ] || echo -e "${NC}Abonné ${GREEN}${nom}${NC}"
                #si lactivité est membre rattaché on affiche a quelle orga il est rattaché
                if [ "$id_service" = "7" ] && [ "$OPTION" != "silence" ] && [ "$nom_orga" != null ]
                then
                    echo -e "${NC}Orga Rattachée : ${GREEN}${nom_orga}${NC}"
                fi
                # Number of whitespace-separated words in the "nom" field.
                COMPTE_NOM=$(echo $nom | awk -F' ' '{for (i=1; i != NF; i++); print i;}')
                case "${COMPTE_NOM}" in
                    0|1)
                        echo "Il faut corriger le champ nom (il manque un nom ou prénom) de paheko"
                        echo "je quitte et supprime le fichier ${FILE_CREATEUSER}"
                        rm -f $FILE_CREATEUSER
                        exit 2
                        ;;
                    2)
                        nom_ok=$(echo $nom | awk -F' ' '{print $1}')
                        prenom_ok=$(echo $nom | awk -F' ' '{print $2}')
                        ;;
                    *)
                        # More than two words: ALL-CAPS words go to the family
                        # name, the others to the first name.
                        nom_ok=
                        prenom_ok=
                        for i in ${nom}; do grep -q '^[A-Z]*$' <<<"${i}" && nom_ok="${nom_ok}${sep}${i}" || prenom_ok="${prenom_ok}${sep}${i}"; done
                        nom_ok="${nom_ok#${sep}}"
                        prenom_ok="${prenom_ok#${sep}}"
                        if [ -z "${nom_ok}" ] || [ -z "${prenom_ok}" ]; then
                            ERRMSG="Erreur : Il faut corriger le champ nom qui contient plus de 2 infos dans paheko"
                            [ "${IP_MAIL}" = "true" ] && ExpMail ${IP_MAILDEST} "Erreur interrogation Paheko" "${ERRMSG}" || echo ${ERRMSG}
                            rm -f $FILE_CREATEUSER
                            exit
                        fi
                esac
                # comme l' orga est à null nom orga est a vide, pas d' admin orga, on met dans l' agora générale
                # pas d' équipe agora et de groupe nextcloud spécifique
                nom_orga=" "
                admin_orga="N"
                nc_base="O"
                equipe_agora=" "
                groupe_nc_base=" "
            else
                # L' orga est renseigné dans paheko donc les nom et prenoms sont forcé a nom_orga et Organisation
                # un équipe agora portera le nom de l' orga, le compte ne sera pas créé dans le nextcloud général
                # et le compte est admin de son orga
                nom_orga=$(echo $nom_orga | tr [:upper:] [:lower:])
                [ "$OPTION" = "silence" ] || echo -e "${NC}Orga : ${GREEN}${nom_orga}${NC}"
                nom_ok=$nom_orga
                # test des caractères autorisés dans le nom d' orga: lettres, chiffres et/ou le tiret
                if ! [[ "${nom_ok}" =~ ^[[:alnum:]-]+$ ]]; then
                    ERRMSG="Erreur : l' orga doit être avec des lettres et/ou des chiffres. Le séparateur doit être le tiret"
                    [ "${IP_MAIL}" = "true" ] && ExpMail ${IP_MAILDEST} "Erreur interrogation Paheko" "${ERRMSG}" || echo ${ERRMSG}
                    rm -f $FILE_CREATEUSER
                    exit 2
                fi
                prenom_ok=organisation
                equipe_agora=$nom_orga
                groupe_nc_base=" "
                nc_base="N"
                admin_orga="O"
            fi
            #On met le mail et le mail de secours en minuscules
            email=$(echo $email | tr [:upper:] [:lower:])
            email_secours=$(echo $email_secours | tr [:upper:] [:lower:])
            # Pour le reste on renomme les null en N ( non ) et les valeurs 1 en O ( Oui)
            cloud=$(echo $cloud | sed -e 's/0/N/g' | sed -e 's/1/O/g')
            paheko=$(echo $garradin | sed -e 's/0/N/g' | sed -e 's/1/O/g')
            wordpress=$(echo $wordpress | sed -e 's/0/N/g' | sed -e 's/1/O/g')
            agora=$(echo $agora | sed -e 's/0/N/g' | sed -e 's/1/O/g')
            docuwiki=$(echo $docuwiki | sed -e 's/0/N/g' | sed -e 's/1/O/g')
            # et enfin on écrit dans le fichier
            echo "$nom_ok;$prenom_ok;$email;$email_secours;$nom_orga;$admin_orga;$cloud;$paheko;$wordpress;$agora;$docuwiki;$nc_base;$groupe_nc_base;$equipe_agora;$quota_disque">>${FILE_CREATEUSER}
        done
    else
        [ "$OPTION" = "silence" ] || echo "Rien à créer"
        exit 2
    fi
}
# Main
# Generate the creation file for every member flagged "A créer", quietly.
Int_paheko_Action "A créer" "silence"
exit 0

14
bin2/iptables.sh Executable file
View File

@@ -0,0 +1,14 @@
#!/bin/bash
# Install SNAT rules so outgoing TCP from the sympa container leaves through
# the secondary address bound to ens18:0 (custom "ipbis" nat chain).
#cleaning, may throw errors at first launch
#iptables -t nat -D POSTROUTING -o ens18 -j ipbis
#iptables -t nat -F ipbis
#iptables -t nat -X ipbis
# (Re)create and empty the dedicated chain.
iptables -t nat -N ipbis
iptables -t nat -F ipbis
iptables -t nat -I ipbis -o ens18 -p tcp --source `docker inspect -f '{{.NetworkSettings.Networks.sympaNet.IPAddress}}' sympaServ` -j SNAT --to `ifconfig ens18:0 | grep "inet" | awk '{print $2}'`
# NOTE(review): this second rule inspects container "sympaServ" on jirafeauNet
# — presumably "jirafeauServ" was intended; confirm before changing.
iptables -t nat -I ipbis -o ens18 -p tcp --source `docker inspect -f '{{.NetworkSettings.Networks.jirafeauNet.IPAddress}}' sympaServ` -j SNAT --to `ifconfig ens18:0 | grep "inet" | awk '{print $2}'`
iptables -t nat -A ipbis -j RETURN
# Re-hook the chain on POSTROUTING (delete first to avoid duplicates).
iptables -t nat -D POSTROUTING -o ens18 -j ipbis
iptables -t nat -I POSTROUTING -o ens18 -j ipbis

120
bin2/kazDockerNet.sh Executable file
View File

@@ -0,0 +1,120 @@
#!/bin/bash
#Ki: François
#Kan: 2021
#Koi: gestion des réseaux docker
#15/01/2025: Dernière modif by fab: connecter le réseau de l'orga nouvellement créé au ocntainter Traefik
# faire un completion avec les composant dispo
# Manage per-service docker bridge networks (list them, allocate new /28s).
PRG=$(basename $0)
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
# Print command-line help and exit 1.
usage () {
    echo "Usage: ${PRG} [-n] [-h] list|add [netName]..."
    echo "    -n : simulation"
    echo "    -h|--help : help"
    echo
    echo "    create all net : ${PRG} add $(${KAZ_BIN_DIR}/kazList.sh compose validate)"
    exit 1
}
# Networks requested on the command line (space separated).
allNetName=""
export CMD=""
# Argument parsing: -n turns every action into an echo (dry run).
for ARG in $@; do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            ;;
        '-n' )
            shift
            export SIMU="echo"
            ;;
        -*)
            usage
            ;;
        list|add)
            CMD="${ARG}"
            shift;
            ;;
        *)
            allNetName="${allNetName} ${ARG}"
            shift
            ;;
    esac
done
if [ -z "${CMD}" ] ; then
    usage
fi
# running composes
export allBridgeName="$(docker network list | grep bridge | awk '{print $2}')"
# running network
# Subnets (CIDR) currently used by those bridges, e.g. "10.0.0.16/28".
export allBridgeNet=$(for net in ${allBridgeName} ; do docker inspect ${net} | grep Subnet | sed 's#.*"\([0-9]*\.[0-9]*\.[0-9]*\.[0-9]*/[0-9]*\)".*# \1#'; done)
# Cursor of the last allocated subnet (10.minB.minC.minD*16).
minB=0
minC=0
minD=0
# Allocate a docker bridge "<$1>Net" on the first free 10.x.y.z/28 subnet
# and connect the traefik container to it. No-op when the bridge exists.
# Scans 10.{minB..255}.{minC..255}.{16*minD..} and advances the cursor so a
# later call in the same run resumes after the last allocation.
getNet() {
    netName="$1Net"
    if [[ "${allBridgeName}" =~ "${netName}" ]]; then
        echo "${netName} already created"
        return
    fi
    # echo "start 10.${minB}.${minC}.$((${minD}*16))"
    find=""
    for b in $(eval echo {${minB}..255}); do
        for c in $(eval echo {${minC}..255}); do
            for d in $(eval echo {${minD}..15}); do
                if [ ! -z "${find}" ]; then
                    # Previous iteration allocated: save the cursor and stop.
                    minB=${b}
                    minC=${c}
                    minD=${d}
                    return
                fi
                # to try
                subnet="10.${b}.${c}.$((d*16))"
                if [[ "${allBridgeNet}" =~ " ${subnet}/" ]];
                then
                    # used
                    # XXX check netmask
                    continue
                fi
                # the winner is...
                echo "${netName} => ${subnet}/28"
                ${SIMU} docker network create --subnet "${subnet}/28" "${netName}"
                #maj du 15/01 by fab (pour éviter de restart le traefik)
                ${SIMU} docker network connect "${netName}" traefikServ
                find="ok"
            done
            minD=0
        done
        minC=0
    done
}
# Print the bridge names and subnets collected at startup (word-split on
# purpose: multi-line values are flattened to one line).
list () {
    echo "name: " ${allBridgeName}
    echo "net: " ${allBridgeNet}
}
# Allocate every network named on the command line.
add () {
    # Nothing to do without at least one network name: show help and exit.
    [ -n "${allNetName}" ] || usage
    for netName in ${allNetName}; do
        getNet "${netName}"
    done
}
"${CMD}"

181
bin2/kazList.sh Executable file
View File

@@ -0,0 +1,181 @@
#!/bin/bash
# kazList.sh: list and inspect KAZ compose stacks, orgas and their services.
PRG=$(basename $0)
SIMU=""
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
cd "$(dirname $0)"
# One line per container: "<id> <name> <status…>"; parsed by compose_status.
ALL_STATUS=$(docker ps -a --format "{{.ID}} {{.Names}} {{.Status}}")
# "svc1|svc2|…" alternation of every available service (usage + dispatch).
SERVICES_CHOICE="$(getAvailableServices | tr "\n" "|")"
SERVICES_CHOICE="${SERVICES_CHOICE%|}"
# Print command-line help (with color escapes) and exit 1.
usage () {
    echo "${RED}${BOLD}" \
         "Usage: $0 [-h] {compose|orga|service} {available|validate|enable|disable|status} [names]...${NL}" \
         "    -h help${NL}" \
         "    compose {available|validate|enable|disable} : list docker-compose name${NL}" \
         "    compose status : status of docker-compose (default available)${NL}" \
         "    service {available|validate} : list services name${NL}" \
         "    service {enable|disable} : list services in orga${NL}" \
         "    service status : status of services in orga${NL}" \
         "    service {${SERVICES_CHOICE}}${NL}" \
         "            : list of orga enable a service ${NL}" \
         "    [compose] in${NL}" \
         "       ${CYAN}$((getAvailableComposes;getAvailableOrgas) | tr "\n" " ")${NC}${NL}"
    exit 1
}
# ========================================
# Every compose name given as argument is "available": print them back,
# space separated, on a single line.
compose_available () {
    printf '%s\n' "$*"
}
# Print the composes listed in config/container-*.list whose proxy flag
# (variable "proxy_<name>", dashes mapped to underscores) is set to "on".
getComposeEnableByProxy () {
    onList=$(
        for type in ${KAZ_CONF_DIR}/container-*.list ; do
            getList "${type}"
        done)
    local compose
    for compose in ${onList} ; do
        # Indirect expansion: read the value of the per-compose proxy flag.
        composeFlag="proxy_${compose//-/_}"
        [[ "${!composeFlag}" == "on" ]] && echo ${compose}
    done
}
# Print, on one line, the composes declared in the container-*.list files,
# restricted to the names given as arguments.
compose_validate () {
    echo $(
        for type in ${KAZ_CONF_DIR}/container-*.list ; do
            getList "${type}"
        done | filterInList $*)
}
# Print the proxy-enabled composes among the arguments (one line).
compose_enable () {
    echo $(getComposeEnableByProxy | filterInList $*)
}
# Print the available-but-not-proxy-enabled composes among the arguments.
compose_disable () {
    echo $(getAvailableComposes | filterNotInList $(getComposeEnableByProxy) | filterInList $*)
}
# For each compose name given, print every service of its docker-compose.yml
# with a colored [status] extracted from the global ALL_STATUS snapshot.
compose_status () {
    for compose in $*; do
        cd "${KAZ_COMP_DIR}/${compose}"
        echo "${compose}:"
        for service in $(docker-compose ps --services 2>/dev/null); do
            # Short (12 char) container id; empty when the service is down.
            id=$(docker-compose ps -q "${service}" | cut -c 1-12)
            if [ -z "${id}" ]; then
                echo " - ${RED}${BOLD}[Down]${NC} ${service}"
            else
                # Strip "<id> <name> " to keep only the status text.
                status=$(grep "^${id}\b" <<< "${ALL_STATUS}" | sed "s/.*${id}\s\s*\S*\s\s*\(\S*.*\)/\1/")
                COLOR=$([[ "${status}" =~ Up ]] && echo "${GREEN}" || echo "${RED}")
                echo " - ${COLOR}${BOLD}[${status}]${NC} ${service}"
            fi
        done
    done
}
# ========================================
# Print every known service name on one line (same list for both verbs).
service_available () {
    echo $(getAvailableServices)
}
service_validate () {
    echo $(getAvailableServices)
}
# For each "*-orga" compose given, print the services its docker-compose.yml
# declares (paheko is flagged by a "usePaheko" marker file instead).
getServiceInOrga () {
    for orga in $*; do
        [[ "${orga}" = *"-orga" ]] || continue
        local ORGA_DIR="${KAZ_COMP_DIR}/${orga}"
        ORGA_COMPOSE="${ORGA_DIR}/docker-compose.yml"
        [[ -f "${ORGA_COMPOSE}" ]] || continue
        for service in $(getAvailableServices); do
            case "${service}" in
                paheko)
                    [ -f "${ORGA_DIR}/usePaheko" ] && echo "${service}"
                    ;;
                wiki)
                    # Service "wiki" is implemented by a "dokuwiki:" section.
                    grep -q "\s*dokuwiki:" "${ORGA_COMPOSE}" 2>/dev/null && echo "${service}"
                    ;;
                wp)
                    # Service "wp" is implemented by a "wordpress:" section.
                    grep -q "\s*wordpress:" "${ORGA_COMPOSE}" 2>/dev/null && echo "${service}"
                    ;;
                *)
                    grep -q "\s*${service}:" "${ORGA_COMPOSE}" 2>/dev/null && echo "${service}"
            esac
        done
    done
}
# Reverse lookup: $1 is a service, the rest are composes; print each "*-orga"
# whose docker-compose.yml (or usePaheko marker) enables that service.
getOrgaWithService() {
    service="$1"
    shift
    # Map the service name to the section keyword used in compose files.
    case "${service}" in
        wiki) keyword="dokuwiki" ;;
        wp)   keyword="wordpress" ;;
        *)    keyword="${service}" ;;
    esac
    for orga in $*; do
        [[ "${orga}" = *"-orga" ]] || continue
        local ORGA_DIR="${KAZ_COMP_DIR}/${orga}"
        ORGA_COMPOSE="${ORGA_DIR}/docker-compose.yml"
        [[ -f "${ORGA_COMPOSE}" ]] || continue
        if [ "${service}" = "paheko" ]; then
            [ -f "${ORGA_DIR}/usePaheko" ] && echo "${orga}"
        else
            grep -q "\s*${keyword}:" "${ORGA_COMPOSE}" 2>/dev/null && echo "${orga}"
        fi
    done
}
# Print the distinct services enabled in the given orgas (one line).
service_enable () {
    echo $(getServiceInOrga $* | sort -u)
}
# Print the services enabled in none of the given orgas.
service_disable () {
    echo $(getAvailableServices | filterNotInList $(getServiceInOrga $*))
}
# Not implemented yet: should report a per-orga status ("docker ps" style)
# for every enabled service.
service_status () {
    printf '%s\n' '*** TODO ***'
}
# ========================================
KAZ_CMD=""
case "$1" in
'-h' | '-help' )
usage
;;
compose|service)
KAZ_CMD="$1"
shift
;;
*)
usage
;;
esac
KAZ_OPT=""
case "$1" in
available|validate|enable|disable|status)
KAZ_OPT="$1"
shift
;;
*)
if [ "${KAZ_CMD}" = "service" ] && [[ $1 =~ ^(${SERVICES_CHOICE})$ ]]; then
KAZ_OPT="$1"
shift
getOrgaWithService "${KAZ_OPT}" $(filterAvailableComposes $*)
exit
fi
usage
;;
esac
${KAZ_CMD}_${KAZ_OPT} $(filterAvailableComposes $*)

11
bin2/ldap/ldap_sauve.sh Executable file
View File

@@ -0,0 +1,11 @@
#!/bin/bash
# Dump the whole LDAP directory (slapcat inside the container) to a gzipped
# LDIF backup file.
KAZ_ROOT=/kaz
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
FILE_LDIF=/home/sauve/ldap.ldif
. $DOCKERS_ENV
# -u 0: run slapcat as root in the container; base DN comes from dockers.env.
docker exec -u 0 -i ${ldapServName} slapcat -F /opt/bitnami/openldap/etc/slapd.d -b ${ldap_root} | gzip >${FILE_LDIF}.gz

23
bin2/ldap/ldapvi.sh Executable file
View File

@@ -0,0 +1,23 @@
#!/bin/bash
# Interactively edit the KAZ LDAP directory with ldapvi, bound as admin.
KAZ_ROOT=/kaz
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
. $KAZ_KEY_DIR/env-ldapServ
# Current IP of the LDAP container on its dedicated docker network.
LDAP_IP=$(docker inspect -f '{{.NetworkSettings.Networks.ldapNet.IPAddress}}' ldapServ)
# Ask which editor to use; default to vi. -r keeps backslashes literal.
# (fix: the default assignment was previously duplicated)
read -r -p "quel éditeur ? [vi] " EDITOR
EDITOR=${EDITOR:-vi}
# if [ ${EDITOR} = 'emacs' ]; then
#     echo "ALERTE ALERTE !!! quelqu'un a voulu utiliser emacs :) :) :)"
#     exit
# fi
export EDITOR
ldapvi -h $LDAP_IP -D "cn=${LDAP_ADMIN_USERNAME},${ldap_root}" -w ${LDAP_ADMIN_PASSWORD} --discover

223
bin2/ldap/migrate_to_ldap.sh Executable file
View File

@@ -0,0 +1,223 @@
#!/bin/bash
# One-shot migration of postfix accounts and aliases into the KAZ LDAP.
# NOTE(review): the unconditional "exit 1" right below makes everything after
# it dead code — kept for historical reference only.
echo "ATTENTION ! Il ne faut plus utiliser ce script, il est probable qu'il commence à mettre la grouille avec le LDAP qui vit sa vie..."
exit 1
KAZ_ROOT=/kaz
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
. $KAZ_KEY_DIR/env-ldapServ
. $KAZ_KEY_DIR/env-paheko
# Input: postfix flat files; Paheko API fills in the missing member data.
ACCOUNTS=/kaz/dockers/postfix/config/postfix-accounts.cf
LDAP_IP=$(docker inspect -f '{{.NetworkSettings.Networks.ldapNet.IPAddress}}' ldapServ)
URL_GARRADIN="$httpProto://${API_USER}:${API_PASSWORD}@kaz-paheko.$(echo $domain)"
# docker exec -i nextcloudDB mysql --user=${nextcloud_MYSQL_USER} --password=${nextcloud_MYSQL_PASSWORD} ${nextcloud_MYSQL_DATABASE} <<< "select * from oc_accounts;" > /tmp/oc_accounts
ERRORS="/tmp/ldap-errors.log"
> ${ERRORS}
mkdir -p /tmp/ldap/
# curl -s ${URL_GARRADIN}/api/sql -d "SELECT * from membres where emails_rattaches LIKE '%mailrattache%';"
# ---- Pass 1: create/refresh one LDAP user entry per postfix account ------
for line in `cat ${ACCOUNTS}`
do
mail=$(echo $line | awk -F '|' '{print $1}')
user=$(echo $mail | awk -F '@' '{print $1}')
domain=$(echo $mail | awk -F '@' '{print $2}')
pass=$(echo $line | awk -F '|' '{print $2}' | sed -e "s/SHA512-//")
IDENT_KAZ=
# In prod, enrich from the member's Paheko record; otherwise use defaults.
if [ ${mode} = "prod" ]; then
ficheGarradin=$(curl -s ${URL_GARRADIN}/api/sql -d "SELECT * from membres where email='${mail}';")
mailDeSecours=$(echo ${ficheGarradin} | jq .results[0].email_secours | sed -e "s/\"//g")
quota=$(echo ${ficheGarradin} | jq .results[0].quota_disque | sed -e "s/\"//g")
nom=$(echo ${ficheGarradin} | jq .results[0].nom | sed -e "s/\"//g")
nom_orga=$(echo ${ficheGarradin} | jq .results[0].nom_orga | sed -e "s/\"//g")
else
mailDeSecours=${mail}
quota=1
nom=${mail}
nom_orga="null"
fi
if [ "${quota}" = "null" ]; then
quota=1
fi
# nextcloudEnabled=MAYBE
# IDENT_KAZ=$(grep -i \"${mail}\" /tmp/oc_accounts | cut -f1)
#
# if [ ! -z "${IDENT_KAZ}" ]; then # ident Kaz trouvé avec le mail Kaz
#     nextcloudEnabled=TRUE
# else # pas trouvé avec le mail Kaz
#     if [ "${nom_orga}" != "null" ]; then # c'est une orga, pas de NC
#	IDENT_KAZ="null"
#	nextcloudEnabled=FALSE
#     else # pas trouvé avec le mail Kaz, pas une orga, on retente avec le mail de secours
#	IDENT_KAZ=$(grep -i \"${mailDeSecours}\" /tmp/oc_accounts | cut -f1 | head -n1)
#	if [ ! -z "${IDENT_KAZ}" ]; then # on a trouvé l'ident kaz chez NC avec le mail de secours
#	    nextcloudEnabled=TRUE
#	else # pas trouvé avec le mail Kaz, pas une orga, pas trouvé avec le mail de secours
#	    ficheRattache=$(curl -s ${URL_GARRADIN}/api/sql -d "SELECT * from membres where emails_rattaches LIKE '%${mail}%';" | jq ".results | length")
#	    if [ $ficheRattache != "0" ]; then # c'est un mail rattaché, pas de NC c'est normal
#		IDENT_KAZ="null"
#		nextcloudEnabled=FALSE
#	    else # pas trouvé, pas une orga, pas mail rattaché donc souci
#		echo "Pas trouvé l'identifiant Kaz nextcloud pour ${mail} / ${mailDeSecours}, on désactive nextcloud pour ce compte" >> ${ERRORS}
#		IDENT_KAZ="null"
#		nextcloudEnabled=FALSE
#	    fi
#	fi
#     fi
# fi
# Add the entry, then modify it to attach all KAZ-specific object classes
# and attributes (ldapmodify -c: continue on "already exists" errors).
echo -e "\n\ndn: cn=${mail},ou=users,${ldap_root}\n\
changeType: add\n\
objectClass: inetOrgPerson\n\
sn: ${nom}\n\
userPassword: ${pass}\n\
\n\n\
dn: cn=${mail},ou=users,${ldap_root}\n\
changeType: modify\n\
replace: objectClass\n\
objectClass: inetOrgPerson\n\
objectClass: kaznaute\n\
objectClass: PostfixBookMailAccount\n\
objectClass: nextcloudAccount\n\
-\n\
replace: sn\n\
sn: ${nom}\n\
-\n\
replace: mail\n\
mail: ${mail}\n\
-\n\
replace: mailEnabled\n\
mailEnabled: TRUE\n\
-\n\
replace: mailGidNumber\n\
mailGidNumber: 5000\n\
-\n\
replace: mailHomeDirectory\n\
mailHomeDirectory: /var/mail/${domain}/${user}/\n\
-\n\
replace: mailQuota\n\
mailQuota: ${quota}G\n\
-\n\
replace: mailStorageDirectory\n\
mailStorageDirectory: maildir:/var/mail/${domain}/${user}/\n\
-\n\
replace: mailUidNumber\n\
mailUidNumber: 5000\n\
-\n\
replace: nextcloudQuota\n\
nextcloudQuota: ${quota} GB\n\
-\n\
replace: mailDeSecours\n\
mailDeSecours: ${mailDeSecours}\n\
-\n\
replace: quota\n\
quota: ${quota}\n\
-\n\
replace: agoraEnabled\n\
agoraEnabled: TRUE\n\
-\n\
replace: mobilizonEnabled\n\
mobilizonEnabled: TRUE\n\n" | tee /tmp/ldap/${mail}.ldif | ldapmodify -c -H ldap://${LDAP_IP} -D "cn=${LDAP_ADMIN_USERNAME},${ldap_root}" -x -w ${LDAP_ADMIN_PASSWORD}
done
#replace: nextcloudEnabled\n\
#nextcloudEnabled: ${nextcloudEnabled}\n\
#-\n\
#replace: identifiantKaz\n\
#identifiantKaz: ${IDENT_KAZ}\n\
#-\n\
# ---- Pass 2: migrate aliases; local targets go to LDAP, external targets
# ---- stay in a residual postfix virtual file.
OLDIFS=${IFS}
IFS=$'\n'
# ALIASES est le fichier d'entrée
ALIASES="/kaz/dockers/postfix/config/postfix-virtual.cf"
# ALIASES_WITHLDAP est le fichier de sortie des forwards qu'on ne met pas dans le ldap
ALIASES_WITHLDAP="/kaz/dockers/postfix/config/postfix-virtual-withldap.cf"
# On vide le fichier de sortie avant de commencer
> ${ALIASES_WITHLDAP}
for line in `cat ${ALIASES}`
do
echo "Virtual line is $line"
if [ `grep -v "," <<< $line` ]
then
echo "Alias simple"
mail=$(echo $line | awk -F '[[:space:]]*' '{print $2}')
if [ `grep $mail ${ACCOUNTS}` ]
then
echo "Alias vers un mail local, go ldap"
# Collect every alias pointing at this local mailbox.
LIST=""
for alias in `grep ${mail} ${ALIASES} | grep -v "," | cut -d' ' -f1`
do
LIST=${LIST}"mailAlias: $alias\n"
done
echo -e "dn: cn=${mail},ou=users,${ldap_root}\n\
changeType: modify
replace: mailAlias\n\
$LIST\n\n" | ldapmodify -c -H ldap://${LDAP_IP} -D "cn=${LDAP_ADMIN_USERNAME},${ldap_root}" -x -w ${LDAP_ADMIN_PASSWORD}
else
echo "Alias vers un mail externe, go fichier"
echo $line >> ${ALIASES_WITHLDAP}
echo " + intégration LDAP"
src=$(echo $line | awk -F '[[:space:]]*' '{print $1}')
dst=$(echo $line | awk -F '[[:space:]]*' '{print $2}')
echo -e "\n\ndn: cn=${src},ou=mailForwardings,${ldap_root}\n\
changeType: add\n\
objectClass: organizationalRole\n\
\n\n\
dn: cn=${src},ou=mailForwardings,${ldap_root}\n\
changeType: modify\n\
replace: objectClass\n\
objectClass: organizationalRole\n\
objectClass: PostfixBookMailForward\n\
-\n\
replace: mailAlias\n\
mailAlias: ${src}\n\
-\n\
replace: mail\n\
mail: ${dst}\n\n" | ldapmodify -c -H ldap://${LDAP_IP} -D "cn=${LDAP_ADMIN_USERNAME},${ldap_root}" -x -w ${LDAP_ADMIN_PASSWORD}
fi
else
echo "Forward vers plusieurs adresses, on met dans le fichier"
echo $line >> ${ALIASES_WITHLDAP}
echo " + intégration LDAP"
src=$(echo $line | awk -F '[[:space:]]*' '{print $1}')
dst=$(echo $line | awk -F '[[:space:]]*' '{print $2}')
# Multiple comma-separated targets: one "mail:" attribute per target.
OOLDIFS=${IFS}
IFS=","
LIST=""
for alias in ${dst}
do
LIST=${LIST}"mail: $alias\n"
done
IFS=${OOLDIFS}
echo -e "\n\ndn: cn=${src},ou=mailForwardings,${ldap_root}\n\
changeType: add\n\
objectClass: organizationalRole\n\
\n\n\
dn: cn=${src},ou=mailForwardings,${ldap_root}\n\
changeType: modify\n\
replace: objectClass\n\
objectClass: organizationalRole\n\
objectClass: PostfixBookMailForward\n\
-\n\
replace: mailAlias\n\
mailAlias: ${src}\n\
-\n\
replace: mail\n\
${LIST}\n\n" | ldapmodify -c -H ldap://${LDAP_IP} -D "cn=${LDAP_ADMIN_USERNAME},${ldap_root}" -x -w ${LDAP_ADMIN_PASSWORD}
fi
done
IFS=${OLDIFS}

21
bin2/ldap/tests/nc_orphans.sh Executable file
View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Report nextcloud accounts that have no matching LDAP entry (orphans):
# for each oc_users uid, search LDAP on identifiantKaz and print the uids
# whose search does not return exactly one entry.
KAZ_ROOT=/kaz
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
. $KAZ_KEY_DIR/env-ldapServ
. $KAZ_KEY_DIR/env-nextcloudDB
LDAP_IP=$(docker inspect -f '{{.NetworkSettings.Networks.ldapNet.IPAddress}}' ldapServ)
# Dump every nextcloud uid to a work file.
docker exec -i nextcloudDB mysql --user=${MYSQL_USER} --password=${MYSQL_PASSWORD} ${MYSQL_DATABASE} <<< "select uid from oc_users;" > /tmp/nc_users.txt
OLDIFS=${IFS}
IFS=$'\n'
for line in `cat /tmp/nc_users.txt`; do
    # "numEntries: 1" means exactly one LDAP match — filtered out below.
    result=$(ldapsearch -h $LDAP_IP -D "cn=${LDAP_ADMIN_USERNAME},${ldap_root}" -w ${LDAP_ADMIN_PASSWORD} -b $ldap_root -x "(identifiantKaz=${line})" | grep numEntries)
    echo "${line} ${result}" | grep -v "numEntries: 1" | grep -v "^uid"
done
IFS=${OLDIFS}

17
bin2/lib/config.py Normal file
View File

@@ -0,0 +1,17 @@
# Path of the global docker configuration file ("key=value  # comment" lines).
DOCKERS_ENV = "/kaz/config/dockers.env"
# Per-service secret files; {serv} is replaced by the docker service name.
SECRETS = "/kaz/secret/env-{serv}"
def getDockersConfig(key):
    """Return the value of `key` from the dockers.env file.

    Lines look like ``key=value  # optional comment``: the first matching
    line wins, the inline comment is dropped and surrounding whitespace is
    stripped. Raises Exception when the key is absent.
    """
    prefix = f"{key}="
    with open(DOCKERS_ENV) as config:
        for raw_line in config:
            if not raw_line.startswith(prefix):
                continue
            value = raw_line.split("=", 1)[1]
            return value.split("#")[0].strip()
    raise Exception(f"getDockersConfig(): No config for {key}")
def getSecretConfig(serv, key):
    """Return the value of `key` from the secret file of service `serv`.

    Reads /kaz/secret/env-<serv>, which contains ``key=value`` lines with
    optional ``#`` comments. Raises Exception when the key is absent.
    """
    with open(SECRETS.format(serv=serv)) as config:
        for line in config:
            if line.startswith(f"{key}="):
                # Fix: maxsplit=1 (was 2) so a '=' inside the value (e.g. a
                # base64 password) is kept intact — consistent with
                # getDockersConfig().
                return line.split("=", 1)[1].split("#")[0].strip()
    raise Exception(f"getSecretConfig(): No config for {serv}/{key}")

101
bin2/lib/ldap.py Normal file
View File

@@ -0,0 +1,101 @@
import ldap
from passlib.hash import sha512_crypt
from email_validator import validate_email, EmailNotValidError
import subprocess
from .config import getDockersConfig, getSecretConfig
class Ldap:
    """Context-manager wrapper around the KAZ OpenLDAP server.

    Credentials and the base DN are read from dockers.env / the secret
    files; the server IP is resolved at construction time with
    ``docker inspect`` on the ldapServ container.
    """
    def __init__(self):
        self.ldap_connection = None
        self.ldap_root = getDockersConfig("ldap_root")
        self.ldap_admin_username = getSecretConfig("ldapServ", "LDAP_ADMIN_USERNAME")
        self.ldap_admin_password = getSecretConfig("ldapServ", "LDAP_ADMIN_PASSWORD")
        # IP of the ldapServ container on the ldapNet docker network.
        cmd="docker inspect -f '{{.NetworkSettings.Networks.ldapNet.IPAddress}}' ldapServ"
        self.ldap_host = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT).strip().decode()
    def __enter__(self):
        # Bind as the LDAP admin; the connection stays open for the with-block.
        self.ldap_connection = ldap.initialize(f"ldap://{self.ldap_host}")
        self.ldap_connection.simple_bind_s("cn={},{}".format(self.ldap_admin_username, self.ldap_root), self.ldap_admin_password)
        return self
    def __exit__(self, tp, e, traceback):
        self.ldap_connection.unbind_s()
    def get_email(self, email):
        """
        Return the entries whose main address (cn) or alias (mailAlias)
        matches `email`; an empty list means the address is unknown.
        """
        # Single filter over both the "cn" and "mailAlias" attributes.
        filter_str = "(|(cn={})(mailAlias={}))".format(email, email)
        result = self.ldap_connection.search_s("ou=users,{}".format(self.ldap_root), ldap.SCOPE_SUBTREE, filter_str)
        return result
    def delete_user(self, email):
        """
        Delete the user entry whose cn is `email`.
        Returns True on success, False when not found or on LDAP error.
        """
        try:
            # Look the user up first to obtain its DN.
            result = self.ldap_connection.search_s("ou=users,{}".format(self.ldap_root), ldap.SCOPE_SUBTREE, "(cn={})".format(email))
            if not result:
                return False  # user not found
            # DN of the first matching entry.
            dn = result[0][0]
            # Remove the entry.
            self.ldap_connection.delete_s(dn)
            return True  # user deleted successfully
        except ldap.NO_SUCH_OBJECT:
            return False  # user not found
        except ldap.LDAPError as e:
            return False  # deletion failed
    def create_user(self, email, prenom, nom, password, email_secours, quota):
        """
        Create a full KAZ user entry (mail + nextcloud + agora attributes).
        Returns True on success, False when an address is invalid or the
        email already exists. `quota` is in GB.
        NOTE(review): email_validator.validate_email raises
        EmailNotValidError on a bad address instead of returning False, so
        the "if not validate_email(...)" guard only covers its return
        value — confirm the intended error behavior.
        """
        # SHA512-crypt hash, announced to LDAP with the {CRYPT} prefix below.
        password_chiffre = sha512_crypt.hash(password)
        if not validate_email(email) or not validate_email(email_secours):
            return False
        if self.get_email(email):
            return False
        # DN under the users organizational unit.
        dn = f"cn={email},ou=users,{self.ldap_root}"
        mod_attrs = [
            ('objectClass', [b'inetOrgPerson', b'PostfixBookMailAccount', b'nextcloudAccount', b'kaznaute']),
            ('sn', f'{prenom} {nom}'.encode('utf-8')),
            ('mail', email.encode('utf-8')),
            ('mailEnabled', b'TRUE'),
            ('mailGidNumber', b'5000'),
            ('mailHomeDirectory', f"/var/mail/{email.split('@')[1]}/{email.split('@')[0]}/".encode('utf-8')),
            ('mailQuota', f'{quota}G'.encode('utf-8')),
            ('mailStorageDirectory', f"maildir:/var/mail/{email.split('@')[1]}/{email.split('@')[0]}/".encode('utf-8')),
            ('mailUidNumber', b'5000'),
            ('mailDeSecours', email_secours.encode('utf-8')),
            ('identifiantKaz', f'{prenom.lower()}.{nom.lower()}'.encode('utf-8')),
            ('quota', str(quota).encode('utf-8')),
            ('nextcloudEnabled', b'TRUE'),
            ('nextcloudQuota', f'{quota} GB'.encode('utf-8')),
            ('mobilizonEnabled', b'TRUE'),
            ('agoraEnabled', b'TRUE'),
            ('userPassword', f'{{CRYPT}}{password_chiffre}'.encode('utf-8')),
            ('cn', email.encode('utf-8'))
        ]
        self.ldap_connection.add_s(dn, mod_attrs)
        return True

134
bin2/lib/mattermost.py Normal file
View File

@@ -0,0 +1,134 @@
import subprocess
from .config import getDockersConfig, getSecretConfig
# Admin credentials and base URL used by every mmctl invocation below.
mattermost_user = getSecretConfig("mattermostServ", "MM_ADMIN_USER")
mattermost_pass = getSecretConfig("mattermostServ", "MM_ADMIN_PASSWORD")
mattermost_url = f"https://{getDockersConfig('matterHost')}.{getDockersConfig('domain')}"
# All commands go through mmctl inside the mattermost container.
mmctl = "docker exec -i mattermostServ bin/mmctl"
class Mattermost:
    """Context-manager wrapper around mmctl for the KAZ Mattermost server.

    Entering authenticates as the admin; leaving clears the credentials.
    Every method shells out to mmctl in the mattermostServ container and
    returns its decoded output (or raises CalledProcessError on failure).
    """
    def __init__(self):
        pass
    def __enter__(self):
        self.authenticate()
        return self
    def __exit__(self, tp, e, traceback):
        self.logout()
    def authenticate(self):
        # Log in to MM as the admin, under the "local-server" profile name.
        cmd = f"{mmctl} auth login {mattermost_url} --name local-server --username {mattermost_user} --password {mattermost_pass}"
        subprocess.run(cmd, shell=True, stderr=subprocess.STDOUT, check=True)
    def logout(self):
        # Drop the stored mmctl credentials.
        cmd = f"{mmctl} auth clean"
        subprocess.run(cmd, shell=True, stderr=subprocess.STDOUT, check=True)
    def post_message(self, message, equipe="kaz", canal="creation-comptes"):
        """
        Post `message` in the team/channel given by `equipe`/`canal`.
        """
        cmd = f"{mmctl} post create {equipe}:{canal} --message \"{message}\""
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return output.decode()
    def get_user(self, user):
        """
        Return True when `user` exists on MM, False otherwise.
        """
        try:
            cmd = f"{mmctl} user search {user} --json"
            user_list_output = subprocess.check_output(cmd, shell=True)
            return True  # username exists
        except subprocess.CalledProcessError:
            return False
    def create_user(self, user, email, password):
        """
        Create an MM user account.
        """
        cmd = f"{mmctl} user create --email {email} --username {user} --password {password}"
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return output.decode()
    def delete_user(self, email):
        """
        Delete an MM user account (no confirmation prompt).
        """
        cmd = f"{mmctl} user delete {email} --confirm"
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return output.decode()
    def update_password(self, email, new_password):
        """
        Change the password of an MM user.
        """
        cmd = f"{mmctl} user change-password {email} --password {new_password}"
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return output.decode()
    def add_user_to_team(self, email, equipe):
        """
        Add a user to an MM team.
        """
        cmd = f"{mmctl} team users add {equipe} {email}"
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return output.decode()
    def add_user_to_channel(self, email, equipe, canal):
        """
        Add a user to an MM channel.
        """
        cmd = f'{mmctl} channel users add {equipe}:{canal} {email}'
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return output.decode()
    def get_teams(self):
        """
        Return the list of MM team names (pager output, last line dropped).
        """
        cmd = f"{mmctl} team list --disable-pager"
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        data_list = output.decode("utf-8").strip().split('\n')
        # The last line is a summary, not a team name.
        data_list.pop()
        return data_list
    def create_team(self, equipe, email):
        """
        Create a private MM team; `email` is passed as team email.
        """
        #DANGER: l'option --email ne rend pas le user admin de l'équipe comme c'est indiqué dans la doc :(
        cmd = f"{mmctl} team create --name {equipe} --display-name {equipe} --private --email {email}"
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        #Workaround: on récup l'id du user et de l'équipe pour affecter le rôle "scheme_admin": true, "scheme_user": true avec l'api MM classique.
        #TODO:
        return output.decode()
    def delete_team(self, equipe):
        """
        Delete an MM team (no confirmation prompt).
        """
        cmd = f"{mmctl} team delete {equipe} --confirm"
        output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        return output.decode()

142
bin2/lib/paheko.py Normal file
View File

@@ -0,0 +1,142 @@
import re
import requests
from .config import getDockersConfig, getSecretConfig
# Paheko API credentials (HTTP basic auth) and base URL of the instance.
paheko_ident = getSecretConfig("paheko", "API_USER")
paheko_pass = getSecretConfig("paheko", "API_PASSWORD")
paheko_auth = (paheko_ident, paheko_pass)
paheko_url = f"https://kaz-paheko.{getDockersConfig('domain')}"
class Paheko:
    """Client for the Paheko member-management API (REST + SQL endpoints).

    Relies on the module-level ``paheko_url`` and ``paheko_auth`` settings.
    Methods return parsed JSON on success and ``None`` on failure.
    """

    def get_categories(self):
        """Return the Paheko member categories (with their member counters),
        or None when the API call fails."""
        api_url = paheko_url + '/api/user/categories'
        response = requests.get(api_url, auth=paheko_auth)
        if response.status_code == 200:
            return response.json()
        return None

    def get_categorie_id(self, categorie_name):
        """Return the id of the category named `categorie_name`, or None if
        it does not exist (or the categories could not be fetched)."""
        categories = self.get_categories()
        if not categories:
            # BUGFIX: get_categories() returns None on API failure; the
            # original code then crashed on None.values().
            return None
        for categorie in categories.values():
            if categorie["name"] == categorie_name:
                return categorie["id"]
        return None

    def get_users_in_categorie(self, categorie):
        """List the members of the Paheko category with numeric id `categorie`.

        Returns parsed JSON on success, None on HTTP error, or an
        (error message, 400) pair when `categorie` is not a number.
        """
        if not (isinstance(categorie, int) or categorie.isdigit()):
            return 'Id de category non valide', 400
        api_url = f"{paheko_url}/api/user/category/{categorie}.json"
        response = requests.get(api_url, auth=paheko_auth)
        if response.status_code == 200:
            return response.json()
        return None

    def get_user(self, ident):
        """Look up one member by kaz email (or alias), by Paheko number, or
        by the short organisation name (its admin member is looked up).

        Returns a single record, a list of records when several rows match,
        or None (no match / HTTP error).
        """
        emailmatchregexp = re.compile(r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$")
        if emailmatchregexp.match(ident):
            # Email form: the regex above also keeps the value safe for the
            # SQL interpolation below.
            data = { "sql": f"select * from users where email='{ident}' or alias = '{ident}'" }
            api_url = paheko_url + '/api/sql/'
            response = requests.post(api_url, auth=paheko_auth, data=data)
            #TODO: check that count == 1 and strip the count from the answer
        elif ident.isdigit():
            # Plain number: direct user lookup.
            # NOTE(review): /api/user/<id> presumably returns a bare record
            # without a "count" key, which the code below expects — confirm.
            api_url = paheko_url + '/api/user/' + ident
            response = requests.get(api_url, auth=paheko_auth)
        else:
            nomorga = re.sub(r'\W+', '', ident)  # drop non-alphanumeric characters
            data = { "sql": f"select * from users where admin_orga=1 and nom_orga='{nomorga}'" }
            api_url = paheko_url + '/api/sql/'
            response = requests.post(api_url, auth=paheko_auth, data=data)
            #TODO: check that count == 1 and strip the count from the answer
        if response.status_code != 200:
            return None
        data = response.json()
        if data["count"] == 1:
            return data["results"][0]
        if data["count"] == 0:
            return None
        return data["results"]

    def set_user(self, ident, field, new_value):
        """Set `field` to `new_value` for one member (ident = Paheko number
        or kaz email). Returns the API answer as JSON, or None on error."""
        # BUGFIX: string.punctuation is used below but the `string` module
        # was never imported, so every call raised NameError.
        import string
        # Resolve the Paheko number when a kaz email was supplied.
        emailmatchregexp = re.compile(r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$")
        if emailmatchregexp.match(ident):
            data = { "sql": f"select id from users where email='{ident}'" }
            api_url = paheko_url + '/api/sql/'
            response = requests.post(api_url, auth=paheko_auth, data=data)
            if response.status_code == 200:
                data = response.json()
                if data['count'] == 0:
                    print("email non trouvé")
                    return None
                elif data['count'] > 1:
                    print("trop de résultat")
                    return None
                else:
                    # Exactly one match: extract its id.
                    ident = data['results'][0]['id']
            else:
                print("pas de résultat")
                return None
        elif not ident.isdigit():
            print("Identifiant utilisateur invalide")
            return None
        # Crude sanitation of the value and of the field name.
        regexp = re.compile("[^a-zA-Z0-9 \\r\\n\\t" + re.escape(string.punctuation) + "]")
        valeur = regexp.sub('', new_value)  # TODO: be much stricter here depending on the accepted fields
        champ = re.sub(r'\W+', '', field)   # alphanumeric only; ideally pick from a whitelist instead
        api_url = paheko_url + '/api/user/' + str(ident)
        payload = {champ: valeur}
        response = requests.post(api_url, auth=paheko_auth, data=payload)
        return response.json()

    def get_users_with_action(self, action):
        """Return every member whose `action_auto` equals `action`
        (e.g. accounts waiting to be created/modified), or None on error.

        NOTE(review): `action` is interpolated into SQL; callers only pass
        internal constants today — keep it that way.
        """
        api_url = paheko_url + '/api/sql/'
        payload = { "sql": f"select * from users where action_auto='{action}'" }
        response = requests.post(api_url, auth=paheko_auth, data=payload)
        if response.status_code == 200:
            return response.json()
        return None

40
bin2/lib/sympa.py Normal file
View File

@@ -0,0 +1,40 @@
import subprocess
from email_validator import validate_email, EmailNotValidError
from .config import getDockersConfig, getSecretConfig
# SOAP credentials for the sympa container, read from the secret store.
sympa_user = getSecretConfig("sympaServ", "SOAP_USER")
sympa_pass = getSecretConfig("sympaServ", "SOAP_PASSWORD")
sympa_listmaster = getSecretConfig("sympaServ", "ADMINEMAIL")
# Public URL of the sympa web interface.
sympa_url = f"https://{getDockersConfig('sympaHost')}.{getDockersConfig('domain')}"
# The SOAP CLI client is run inside the sympaServ container.
sympa_soap = "docker exec -i sympaServ /usr/lib/sympa/bin/sympa_soap_client.pl"
sympa_domain = getDockersConfig('domain_sympa')
# Default list every new kaznaute gets subscribed to.
sympa_liste_info = "infos"
# Reference invocation kept from the legacy shell scripts:
# docker exec -i sympaServ /usr/lib/sympa/bin/sympa_soap_client.pl --soap_url=${httpProto}://${URL_LISTE}/sympasoap --trusted_application=${sympa_SOAP_USER} --trusted_application_password=${sympa_SOAP_PASSWORD} --proxy_vars=\"USER_EMAIL=${LISTMASTER}\" --service=add --service_parameters=\"${NL_LIST},${EMAIL_SOUHAITE}\"" | tee -a "${CMD_SYMPA}"
class Sympa:
    """Wrapper around the sympa SOAP command-line client (run in Docker)."""

    def _execute_sympa_command(self, email, liste, service):
        """Run one SOAP service (`add`/`del`) for (liste, email) and return
        the decoded command output, or None when validation fails.

        NOTE(review): validate_email() raises EmailNotValidError on invalid
        input rather than returning falsy, so the None branch is in
        practice replaced by an exception — confirm intended behaviour.
        """
        if not (validate_email(email) and validate_email(liste)):
            return None
        cmd = f'{sympa_soap} --soap_url={sympa_url}/sympasoap --trusted_application={sympa_user} --trusted_application_password={sympa_pass} --proxy_vars=USER_EMAIL={sympa_listmaster} --service={service} --service_parameters="{liste},{email}" && echo $?'
        return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT).decode()

    def add_email_to_list(self, email, liste=sympa_liste_info):
        """Subscribe `email` to the sympa list `liste` (default: info list)."""
        return self._execute_sympa_command(email, f"{liste}@{sympa_domain}", 'add')

    def delete_email_from_list(self, email, liste=sympa_liste_info):
        """Unsubscribe `email` from the sympa list `liste` (default: info list)."""
        return self._execute_sympa_command(email, f"{liste}@{sympa_domain}", 'del')

8
bin2/lib/template.py Normal file
View File

@@ -0,0 +1,8 @@
import jinja2
# Jinja2 environment rooted at the shared ../templates directory
# (path is relative to the process working directory — TODO confirm).
templateLoader = jinja2.FileSystemLoader(searchpath="../templates")
templateEnv = jinja2.Environment(loader=templateLoader)
def render_template(filename, args):
    """Render the Jinja2 template `filename` (looked up in ../templates)
    with the mapping `args` and return the resulting string."""
    return templateEnv.get_template(filename).render(args)

213
bin2/lib/user.py Normal file
View File

@@ -0,0 +1,213 @@
from email_validator import validate_email, EmailNotValidError
from glob import glob
import tempfile
import subprocess
import re
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
import smtplib
from .paheko import Paheko
from .ldap import Ldap
from .mattermost import Mattermost
from .sympa import Sympa
from .template import render_template
from .config import getDockersConfig, getSecretConfig
# Default input file for batch user creation (one ';'-separated line per user).
DEFAULT_FILE = "/kaz/tmp/createUser.txt"
# Public URLs of the kaz services, advertised in the welcome email.
webmail_url = f"https://webmail.{getDockersConfig('domain')}"
mattermost_url = f"https://agora.{getDockersConfig('domain')}"
mdp_url = f"https://mdp.{getDockersConfig('domain')}"
sympa_url = f"https://listes.{getDockersConfig('domain')}"
site_url = f"https://{getDockersConfig('domain')}"
cloud_url = f"https://cloud.{getDockersConfig('domain')}"
def _generate_password():
    """Generate a random password with the `apg` tool, wrapped in underscores.

    BUGFIX: the original signature took a `self` parameter although this is
    a module-level function called as `_generate_password()` in create_user,
    which raised TypeError on every call.
    """
    cmd = "apg -n 1 -m 10 -M NCL -d"
    output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
    # NOTE(review): apg's output likely ends with a newline that is kept
    # inside the password here — confirm whether that is intended.
    new_password = "_" + output.decode("utf-8") + "_"
    return new_password
def create_user(email, email_secours, admin_orga, nom_orga, quota_disque, nom, prenom, nc_orga, garradin_orga, wp_orga, agora_orga, wiki_orga, nc_base, groupe_nc_base, equipe_agora, password=None):
    """Create a new kaznaute end to end: LDAP entry, Mattermost account,
    default team/channels (plus an orga team for admins), sympa
    subscriptions, confirmation email, then a status post on Mattermost.

    Aborts early (returns None) with a printed diagnostic on any
    validation or LDAP error. `password` is generated when not supplied.
    """
    email = email.lower()
    with Ldap() as ldap:
        # Refuse duplicates: already present in LDAP (mail or alias)?
        if ldap.get_email(email):
            print(f"ERREUR 1: {email} déjà existant dans ldap. on arrête tout")
            return None
        # An orga admin needs a well-formed organisation name.
        if admin_orga == 1:
            if nom_orga is None:
                print(f"ERREUR 0 sur paheko: {email} : nom_orga vide, on arrête tout")
                return
            if not bool(re.match(r'^[a-z0-9-]+$', nom_orga)):
                # BUGFIX: the original message interpolated tab['nom_orga'],
                # but no variable `tab` exists in this scope (NameError).
                print(f"ERREUR 0 sur paheko: {email} : nom_orga ({nom_orga}) incohérent (minuscule/chiffre/-), on arrête tout")
                return
        # Validate the fallback email address.
        # BUGFIX: validate_email() raises EmailNotValidError on bad input
        # instead of returning falsy, so the original
        # `if not validate_email(...)` branch could never run — it crashed.
        email_secours = email_secours.lower()
        try:
            validate_email(email_secours)
        except EmailNotValidError:
            print("Mauvais email de secours")
            return
        # Disk quota must be a plain number.
        quota = quota_disque
        if not quota.isdigit():
            print(f"ERREUR 2: quota non numérique : {quota}, on arrête tout")
            return
        # Generate a password unless one was supplied.
        password = password or _generate_password()
        # Create the LDAP entry.
        # NOTE(review): unclear what prenom/nom are used for in LDAP
        # (question kept from the original author).
        data = {
            "prenom": prenom,
            "nom": nom,
            "password": password,
            "email_secours": email_secours,
            "quota": quota
        }
        if not ldap.create_user(email, **data):
            print("Erreur LDAP")
            return
    with Mattermost() as mm:
        # Mattermost account + default team and the two base channels.
        user = email.split('@')[0]
        mm.create_user(user, email, password)
        mm.add_user_to_team(email, "kaz")
        mm.add_user_to_channel(email, "kaz", "une-question--un-soucis")
        mm.add_user_to_channel(email, "kaz", "cafe-du-commerce--ouvert-2424h")
        # Create a dedicated MM team when the user administers an orga.
        if admin_orga == 1:
            mm.create_team(nom_orga, email)
            # BUG: creating the team did not make `email` its admin,
            # so add it at least as a plain member.
            mm.add_user_to_team(email, nom_orga)
    # Subscribe both addresses to the sympa info newsletter.
    sympa = Sympa()
    sympa.add_email_to_list(email)
    sympa.add_email_to_list(email_secours)
    # Build and send the confirmation email (plain text + HTML).
    context = {
        'ADMIN_ORGA': admin_orga,
        'NOM': f"{prenom} {nom}",
        'EMAIL_SOUHAITE': email,
        'PASSWORD': password,
        'QUOTA': quota_disque,
        'URL_WEBMAIL': webmail_url,
        'URL_AGORA': mattermost_url,
        'URL_MDP': mdp_url,
        'URL_LISTE': sympa_url,
        'URL_SITE': site_url,
        'URL_CLOUD': cloud_url,
    }
    html = render_template("email_inscription.html", context)
    raw = render_template("email_inscription.txt", context)
    # NOTE(review): multipart/mixed is used here; multipart/alternative is
    # the usual container for a text+HTML pair — confirm intent.
    message = MIMEMultipart()
    message["Subject"] = "KAZ: confirmation d'inscription !"
    message["From"] = f"contact@{getDockersConfig('domain')}"
    message["To"] = f"{email}, {email_secours}"
    message.attach(MIMEText(raw, "plain"))
    message.attach(MIMEText(html, "html"))
    with smtplib.SMTP(f"mail.{getDockersConfig('domain')}", 25) as server:
        server.sendmail(f"contact@{getDockersConfig('domain')}", [email, email_secours], message.as_string())
    # Reset the paheko "action_auto" flag so the user is not created twice.
    paheko = Paheko()
    try:
        paheko.set_user(email, "action_auto", "Aucune")
    except Exception:
        # Best effort: the account exists even if the flag reset fails.
        print(f"Erreur paheko pour remettre action_auto = Aucune pour {email}")
    # Announce success on Mattermost.
    with Mattermost() as mm:
        msg = f"**POST AUTO** Inscription réussie pour {email} avec le secours {email_secours} Bisou!"
        mm.post_message(message=msg)
def create_waiting_users():
    """
    Create the pending kaznautes (MM / Cloud / email account + MM post +
    welcome email) for every paheko member whose action flag is "A créer".
    """
    # Crude lock: refuse to run while another instance holds a lock file.
    prefixe = "create_user_lock_"
    if glob(f"{tempfile.gettempdir()}/{prefixe}*"):
        print("Lock présent")
        return None
    # The lock file vanishes automatically when this function returns.
    lock_file = tempfile.NamedTemporaryFile(prefix=prefixe, delete=True)
    # Which kaznautes must be created?
    liste_kaznautes = Paheko().get_users_with_action("A créer")
    if not liste_kaznautes:
        return
    if liste_kaznautes['count'] == 0:
        print("aucun nouveau kaznaute à créer")
        return
    # At least one kaznaute to create.
    for tab in liste_kaznautes['results']:
        create_user(**tab)
    print("fin des inscriptions")
def create_users_from_file(file=DEFAULT_FILE):
    """
    Create the pending kaznautes (MM / Cloud / email account + MM post +
    welcome email) from a ';'-separated file, one user per line.
    Lines starting with '#' and blank lines are skipped; field 16
    (password) is optional.
    """
    # Crude lock: refuse to run while another instance holds a lock file.
    prefixe = "create_user_lock_"
    if glob(f"{tempfile.gettempdir()}/{prefixe}*"):
        print("Lock présent")
        return None
    # The lock file vanishes automatically when this function returns.
    lock_file = tempfile.NamedTemporaryFile(prefix=prefixe, delete=True)
    # Parse the input file into a list of create_user() keyword dicts.
    liste_kaznautes = []
    with open(file) as lines:
        for line in lines:
            line = line.strip()
            if line.startswith("#") or line == "":
                continue
            user_data = line.split(';')
            if len(user_data) < 15:
                # Robustness: a malformed line used to raise IndexError
                # and abort the whole batch; skip it with a diagnostic.
                print(f"ligne ignorée (champs manquants): {line}")
                continue
            user_dict = {
                "nom": user_data[0],
                "prenom": user_data[1],
                "email": user_data[2],
                "email_secours": user_data[3],
                "nom_orga": user_data[4],
                "admin_orga": user_data[5],
                "nc_orga": user_data[6],
                "garradin_orga": user_data[7],
                "wp_orga": user_data[8],
                "agora_orga": user_data[9],
                "wiki_orga": user_data[10],
                "nc_base": user_data[11],
                "groupe_nc_base": user_data[12],
                "equipe_agora": user_data[13],
                "quota_disque": user_data[14],
                # BUGFIX: user_data is a list — list.get(15) raised
                # AttributeError on every line; the password field is
                # optional, so default to None when absent.
                "password": user_data[15] if len(user_data) > 15 else None,
            }
            liste_kaznautes.append(user_dict)
    if liste_kaznautes:
        for tab in liste_kaznautes:
            create_user(**tab)
        print("fin des inscriptions")

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 39 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 73 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.8 KiB

View File

@@ -0,0 +1,72 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
version="1.0"
width="640.000000pt"
height="1280.000000pt"
viewBox="0 0 640.000000 1280.000000"
preserveAspectRatio="xMidYMid meet"
id="svg18"
sodipodi:docname="logo.svg"
xml:space="preserve"
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs22" /><sodipodi:namedview
id="namedview20"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="pt"
showgrid="false"
inkscape:zoom="0.36440298"
inkscape:cx="428.09749"
inkscape:cy="753.28693"
inkscape:window-width="1920"
inkscape:window-height="1032"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg18" /><g
transform="translate(0.000000,1280.000000) scale(0.100000,-0.100000)"
fill="#000000"
stroke="none"
id="g16"><path
d="M1450 12780 c-28 -28 -38 -56 -65 -190 -70 -337 -103 -1013 -111 -2260 l-6 -785 69 -32 c218 -103 428 -212 520 -270 133 -84 182 -103 396 -159 350 -91 440 -127 536 -211 65 -57 94 -102 118 -184 l19 -64 274 0 274 0 17 60 c31 102 63 147 162 221 77 57 183 96 406 150 386 92 459 117 766 259 262 121 388 175 408 175 23 0 24 44 15 915 -13 1328 -47 1950 -124 2250 -35 134 -58 158 -129 130 -78 -29 -169 -147 -453 -587 -216 -333 -330 -485 -385 -512 -32 -15 -76 -19 -265 -27 -269 -10 -1017 -10 -1288 0 -176 6 -194 8 -240 32 -64 33 -95 73 -419 559 -272 408 -340 497 -406 530 -48 25 -64 25 -89 0z m197 -402 c50 -55 184 -325 220 -445 22 -76 23 -250 0 -309 -41 -108 -100 -164 -173 -164 -50 0 -67 17 -88 90 -24 80 -72 370 -92 549 -20 188 -16 257 16 283 32 26 91 24 117 -4z m3353 12 c29 -16 43 -84 36 -170 -20 -223 -80 -593 -112 -693 -18 -54 -36 -67 -93 -67 -53 0 -110 49 -148 128 -22 47 -28 75 -31 154 -7 150 13 217 125 437 52 102 102 194 110 204 18 19 83 23 113 7z m-2675 -1975 c22 -4 67 -21 99 -38 95 -51 142 -134 117 -209 -14 -41 -72 -103 -120 -127 -46 -23 -139 -47 -148 -38 -3 4 5 20 20 35 37 39 59 98 59 162 1 82 -17 134 -62 182 -45 46 -47 51 -22 45 9 -3 35 -8 57 -12z m-199 -36 c-48 -56 -59 -97 -54 -188 5 -78 19 -118 58 -159 12 -13 20 -25 18 -28 -7 -6 -108 26 -137 44 -75 46 -111 99 -111 162 0 49 17 84 63 128 36 35 136 81 175 82 23 0 22 -2 -12 -41z m1984 37 c0 -3 -13 -19 -30 -37 -79 -87 -79 -255 1 -340 16 -18 28 -33 26 -36 -2 -2 -34 6 -72 18 -79 24 -157 89 -176 146 -30 92 44 191 179 239 37 13 72 18 72 10z m223 -16 c60 -23 130 -78 152 -121 19 -36 19 -102 1 -138 -17 -32 -73 -84 -114 -105 -59 -30 -193 -45 -140 -15 10 5 29 30 44 54 54 94 39 240 -33 310 l-36 35 36 0 c20 0 60 -9 90 -20z m-1089 -306 c9 -3 59 -69 113 -146 53 -77 106 -146 118 -154 11 -7 39 -16 60 -19 52 -6 75 -34 75 -92 0 -67 -31 -93 -111 -93 -36 0 -75 7 -96 17 -34 16 -117 106 -170 186 -14 20 -29 37 -33 37 -4 0 -19 -17 -32 -37 -44 -66 -118 -147 -160 -175 -34 -23 -52 -28 -105 -28 -55 0 -68 4 -88 25 -20 19 -25 34 -25 73 0 60 17 78 84 91 57 
10 72 26 186 193 44 64 85 119 92 121 19 8 74 8 92 1z"
id="path2" /><path
d="M1130 9411 c-340 -102 -544 -266 -581 -467 -34 -181 103 -365 345 -466 158 -65 303 -89 656 -108 569 -31 954 -31 1065 1 190 54 276 205 195 345 -73 128 -194 193 -509 274 -286 74 -348 98 -515 199 -188 114 -457 243 -515 248 -30 2 -79 -7 -141 -26z"
id="path4" /><path
d="M5105 9354 c-550 -254 -547 -253 -925 -345 -236 -57 -301 -77 -377 -114 -204 -100 -290 -244 -223 -374 39 -78 115 -129 232 -156 99 -23 629 -25 838 -4 52 5 187 14 300 19 292 14 442 43 596 116 222 105 340 285 304 461 -32 154 -131 268 -311 356 -111 54 -240 97 -290 97 -13 -1 -78 -26 -144 -56z"
id="path6" /><path
d="M1200 6430 l0 -1120 165 0 165 0 2 496 3 496 206 -494 207 -493 181 -3 181 -2 -6 27 c-4 16 -12 39 -20 53 -7 14 -116 264 -243 555 l-231 530 216 510 c118 281 220 518 225 527 19 37 13 38 -177 36 l-186 -3 -177 -463 -176 -462 -3 465 -2 465 -165 0 -165 0 0 -1120z"
id="path8" /><path
d="M3005 7528 c-3 -13 -97 -504 -210 -1093 -113 -588 -208 -1082 -211 -1097 l-6 -28 161 0 c89 0 161 3 162 8 0 4 15 97 32 207 l32 200 212 3 213 2 5 -22 c3 -13 16 -95 30 -183 14 -88 28 -172 31 -187 l6 -28 160 0 160 0 -5 23 c-3 12 -99 508 -212 1102 -113 594 -208 1088 -211 1098 -5 15 -22 17 -174 17 -168 0 -170 0 -175 -22z m258 -1010 c43 -266 78 -486 77 -490 0 -5 -75 -8 -166 -8 l-166 0 5 26 c2 14 38 234 78 490 42 263 78 464 84 464 6 0 44 -209 88 -482z"
id="path10" /><path
d="M4150 7399 l0 -150 290 3 c286 3 290 3 284 -17 -4 -11 -144 -402 -312 -870 l-305 -850 -5 -102 -4 -103 516 0 516 0 0 151 0 150 -315 -3 c-173 -2 -315 -2 -315 -1 0 1 120 334 266 740 380 1054 364 1004 364 1113 l0 90 -490 0 -490 0 0 -151z"
id="path12" /><path
d="M1251 4291 c-15 -4 -27 -17 -31 -31 -12 -46 -20 -984 -10 -1269 30 -885 107 -1328 426 -2460 109 -387 161 -468 329 -516 169 -50 310 55 370 275 26 92 31 634 14 1355 -7 308 -13 585 -14 615 l0 55 256 3 257 2 6 -142 c35 -740 202 -1240 531 -1587 69 -73 201 -179 262 -210 100 -50 231 -33 303 39 22 22 42 40 44 40 2 0 9 -37 16 -82 20 -144 63 -245 129 -306 111 -103 309 -85 415 39 68 80 92 145 196 525 282 1027 353 1413 390 2109 16 300 10 1519 -8 1537 -11 11 -373 13 -1935 14 -1057 0 -1933 -2 -1946 -5z m2775 -2093 c-14 -256 -26 -764 -26 -1055 0 -167 -4 -303 -8 -303 -5 0 -17 10 -28 21 -10 12 -47 41 -80 65 -128 92 -247 242 -324 408 -106 228 -160 493 -184 904 l-5 82 330 0 331 0 -6 -122z"
id="path14" /></g><image
width="156.53996"
height="217.31963"
preserveAspectRatio="none"
xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAfCAYAAAD9cg1AAAAABHNCSVQICAgIfAhkiAAAAghJREFU
SIm1lrFrFFEQxn+bi/E0EAU7ESxsEgsJFyGNQuwVFEQQrC1S2tjnHxCEWFmlFC3Ezlo8rwmiqJfG
Iu2JqHARzO39LLIJu2/vdt8lOjDcze033/duHjOzUGPiBXFNbIs/xL74XtwQV8SkjmMc8XHxkfhH
tMI3xdak5HPimxrivO+I12PJE/HVBOT73hcvxQjcHZH8TrwjzmaYJXFdHAS4tzECH4Kk5+L0GOwN
cTfAX6kivxiAv4unag70OMhZzz+fCvBLQfwsIflZJQA8DeLLVQKng/hzDTnApyA+VyUwDOITEQIn
g3inSuBjEK9ECFwL4q9jkeJs1jT7FzYUx4qIzayT85f8sPI44pMg4Zt4dQRubkRD/hbP1gmcz4Za
PjEVX4oPxPvZjOqNaMi1SvKcyM2sPJOMitfisSiBTOSW+CuS/MX+GJnI3NsFG44f2V3xnjU7IQm+
l8DiGfFLQH47kpNd9hrM3GcKDLJnKTDs0CmcfpHFNHuWZnkFfMbjNHAwKROSpEULyg3ILMUSL7Aw
1aCR/6kBsM02PXpBBTJv0px0yZR8lVXznKWTHtWS4Ar/u0BhU6WkdOgwTXmBzTNfuIcuXfr0S7iw
/pCrV5W3aRdqvcxyVN6hSzRgEIWLFhAL8bC0m44oUCf4zwVibeT7zijbYosZZg7ineLqHWsJRP7X
Q9pfySZ/u3a+10wAAAAASUVORK5CYII=
"
id="image32"
x="233.91249"
y="24.324821" /></svg>

After

Width:  |  Height:  |  Size: 6.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 38 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.0 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 72 KiB

BIN
bin2/look/greve/kazdate.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

BIN
bin2/look/greve/kazmel.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.7 KiB

85
bin2/look/greve/logo.svg Normal file
View File

@@ -0,0 +1,85 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
version="1.0"
width="640.000000pt"
height="1280.000000pt"
viewBox="0 0 640.000000 1280.000000"
preserveAspectRatio="xMidYMid meet"
id="svg18"
sodipodi:docname="logo.svg"
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs22">
<rect
x="179.93953"
y="196.11891"
width="517.72233"
height="120.71095"
id="rect248" />
<rect
x="144.39207"
y="193.77589"
width="604.30237"
height="115.55072"
id="rect182" />
</defs>
<sodipodi:namedview
id="namedview20"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="pt"
showgrid="false"
inkscape:zoom="0.48229806"
inkscape:cx="427.12177"
inkscape:cy="902.9686"
inkscape:window-width="1920"
inkscape:window-height="1032"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg18" />
<g
transform="translate(0.000000,1280.000000) scale(0.100000,-0.100000)"
fill="#000000"
stroke="none"
id="g16">
<path
d="M1450 12780 c-28 -28 -38 -56 -65 -190 -70 -337 -103 -1013 -111 -2260 l-6 -785 69 -32 c218 -103 428 -212 520 -270 133 -84 182 -103 396 -159 350 -91 440 -127 536 -211 65 -57 94 -102 118 -184 l19 -64 274 0 274 0 17 60 c31 102 63 147 162 221 77 57 183 96 406 150 386 92 459 117 766 259 262 121 388 175 408 175 23 0 24 44 15 915 -13 1328 -47 1950 -124 2250 -35 134 -58 158 -129 130 -78 -29 -169 -147 -453 -587 -216 -333 -330 -485 -385 -512 -32 -15 -76 -19 -265 -27 -269 -10 -1017 -10 -1288 0 -176 6 -194 8 -240 32 -64 33 -95 73 -419 559 -272 408 -340 497 -406 530 -48 25 -64 25 -89 0z m197 -402 c50 -55 184 -325 220 -445 22 -76 23 -250 0 -309 -41 -108 -100 -164 -173 -164 -50 0 -67 17 -88 90 -24 80 -72 370 -92 549 -20 188 -16 257 16 283 32 26 91 24 117 -4z m3353 12 c29 -16 43 -84 36 -170 -20 -223 -80 -593 -112 -693 -18 -54 -36 -67 -93 -67 -53 0 -110 49 -148 128 -22 47 -28 75 -31 154 -7 150 13 217 125 437 52 102 102 194 110 204 18 19 83 23 113 7z m-2675 -1975 c22 -4 67 -21 99 -38 95 -51 142 -134 117 -209 -14 -41 -72 -103 -120 -127 -46 -23 -139 -47 -148 -38 -3 4 5 20 20 35 37 39 59 98 59 162 1 82 -17 134 -62 182 -45 46 -47 51 -22 45 9 -3 35 -8 57 -12z m-199 -36 c-48 -56 -59 -97 -54 -188 5 -78 19 -118 58 -159 12 -13 20 -25 18 -28 -7 -6 -108 26 -137 44 -75 46 -111 99 -111 162 0 49 17 84 63 128 36 35 136 81 175 82 23 0 22 -2 -12 -41z m1984 37 c0 -3 -13 -19 -30 -37 -79 -87 -79 -255 1 -340 16 -18 28 -33 26 -36 -2 -2 -34 6 -72 18 -79 24 -157 89 -176 146 -30 92 44 191 179 239 37 13 72 18 72 10z m223 -16 c60 -23 130 -78 152 -121 19 -36 19 -102 1 -138 -17 -32 -73 -84 -114 -105 -59 -30 -193 -45 -140 -15 10 5 29 30 44 54 54 94 39 240 -33 310 l-36 35 36 0 c20 0 60 -9 90 -20z m-1089 -306 c9 -3 59 -69 113 -146 53 -77 106 -146 118 -154 11 -7 39 -16 60 -19 52 -6 75 -34 75 -92 0 -67 -31 -93 -111 -93 -36 0 -75 7 -96 17 -34 16 -117 106 -170 186 -14 20 -29 37 -33 37 -4 0 -19 -17 -32 -37 -44 -66 -118 -147 -160 -175 -34 -23 -52 -28 -105 -28 -55 0 -68 4 -88 25 -20 19 -25 34 -25 73 0 60 17 78 84 91 57 
10 72 26 186 193 44 64 85 119 92 121 19 8 74 8 92 1z"
id="path2" />
<path
d="M1130 9411 c-340 -102 -544 -266 -581 -467 -34 -181 103 -365 345 -466 158 -65 303 -89 656 -108 569 -31 954 -31 1065 1 190 54 276 205 195 345 -73 128 -194 193 -509 274 -286 74 -348 98 -515 199 -188 114 -457 243 -515 248 -30 2 -79 -7 -141 -26z"
id="path4" />
<path
d="M5105 9354 c-550 -254 -547 -253 -925 -345 -236 -57 -301 -77 -377 -114 -204 -100 -290 -244 -223 -374 39 -78 115 -129 232 -156 99 -23 629 -25 838 -4 52 5 187 14 300 19 292 14 442 43 596 116 222 105 340 285 304 461 -32 154 -131 268 -311 356 -111 54 -240 97 -290 97 -13 -1 -78 -26 -144 -56z"
id="path6" />
<path
d="M1200 6430 l0 -1120 165 0 165 0 2 496 3 496 206 -494 207 -493 181 -3 181 -2 -6 27 c-4 16 -12 39 -20 53 -7 14 -116 264 -243 555 l-231 530 216 510 c118 281 220 518 225 527 19 37 13 38 -177 36 l-186 -3 -177 -463 -176 -462 -3 465 -2 465 -165 0 -165 0 0 -1120z"
id="path8" />
<path
d="M3005 7528 c-3 -13 -97 -504 -210 -1093 -113 -588 -208 -1082 -211 -1097 l-6 -28 161 0 c89 0 161 3 162 8 0 4 15 97 32 207 l32 200 212 3 213 2 5 -22 c3 -13 16 -95 30 -183 14 -88 28 -172 31 -187 l6 -28 160 0 160 0 -5 23 c-3 12 -99 508 -212 1102 -113 594 -208 1088 -211 1098 -5 15 -22 17 -174 17 -168 0 -170 0 -175 -22z m258 -1010 c43 -266 78 -486 77 -490 0 -5 -75 -8 -166 -8 l-166 0 5 26 c2 14 38 234 78 490 42 263 78 464 84 464 6 0 44 -209 88 -482z"
id="path10" />
<path
d="M4150 7399 l0 -150 290 3 c286 3 290 3 284 -17 -4 -11 -144 -402 -312 -870 l-305 -850 -5 -102 -4 -103 516 0 516 0 0 151 0 150 -315 -3 c-173 -2 -315 -2 -315 -1 0 1 120 334 266 740 380 1054 364 1004 364 1113 l0 90 -490 0 -490 0 0 -151z"
id="path12" />
<path
d="M1251 4291 c-15 -4 -27 -17 -31 -31 -12 -46 -20 -984 -10 -1269 30 -885 107 -1328 426 -2460 109 -387 161 -468 329 -516 169 -50 310 55 370 275 26 92 31 634 14 1355 -7 308 -13 585 -14 615 l0 55 256 3 257 2 6 -142 c35 -740 202 -1240 531 -1587 69 -73 201 -179 262 -210 100 -50 231 -33 303 39 22 22 42 40 44 40 2 0 9 -37 16 -82 20 -144 63 -245 129 -306 111 -103 309 -85 415 39 68 80 92 145 196 525 282 1027 353 1413 390 2109 16 300 10 1519 -8 1537 -11 11 -373 13 -1935 14 -1057 0 -1933 -2 -1946 -5z m2775 -2093 c-14 -256 -26 -764 -26 -1055 0 -167 -4 -303 -8 -303 -5 0 -17 10 -28 21 -10 12 -47 41 -80 65 -128 92 -247 242 -324 408 -106 228 -160 493 -184 904 l-5 82 330 0 331 0 -6 -122z"
id="path14" />
</g>
<text
xml:space="preserve"
transform="matrix(0.96846201,0,0,0.86019954,-43.878364,-38.095408)"
id="text246"
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:106.667px;line-height:125%;font-family:'Arial Black';-inkscape-font-specification:'Arial Black, ';font-variant-ligatures:normal;font-variant-caps:normal;font-variant-numeric:normal;font-feature-settings:normal;text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;white-space:pre;shape-inside:url(#rect248);fill:#00ff00;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"><tspan
x="179.93945"
y="291.75453"
id="tspan371">GREVE</tspan></text>
</svg>

After

Width:  |  Height:  |  Size: 6.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 23 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 38 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

BIN
bin2/look/kaz/kaz-tete.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 71 KiB

BIN
bin2/look/kaz/kazdate.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

BIN
bin2/look/kaz/kazmel.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.2 KiB

72
bin2/look/kaz/logo.svg Normal file
View File

@@ -0,0 +1,72 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
width="640.000000pt" height="1280.000000pt" viewBox="0 0 640.000000 1280.000000"
preserveAspectRatio="xMidYMid meet">
<g transform="translate(0.000000,1280.000000) scale(0.100000,-0.100000)"
fill="#000000" stroke="none">
<path d="M1450 12780 c-28 -28 -38 -56 -65 -190 -70 -337 -103 -1013 -111
-2260 l-6 -785 69 -32 c218 -103 428 -212 520 -270 133 -84 182 -103 396 -159
350 -91 440 -127 536 -211 65 -57 94 -102 118 -184 l19 -64 274 0 274 0 17 60
c31 102 63 147 162 221 77 57 183 96 406 150 386 92 459 117 766 259 262 121
388 175 408 175 23 0 24 44 15 915 -13 1328 -47 1950 -124 2250 -35 134 -58
158 -129 130 -78 -29 -169 -147 -453 -587 -216 -333 -330 -485 -385 -512 -32
-15 -76 -19 -265 -27 -269 -10 -1017 -10 -1288 0 -176 6 -194 8 -240 32 -64
33 -95 73 -419 559 -272 408 -340 497 -406 530 -48 25 -64 25 -89 0z m197
-402 c50 -55 184 -325 220 -445 22 -76 23 -250 0 -309 -41 -108 -100 -164
-173 -164 -50 0 -67 17 -88 90 -24 80 -72 370 -92 549 -20 188 -16 257 16 283
32 26 91 24 117 -4z m3353 12 c29 -16 43 -84 36 -170 -20 -223 -80 -593 -112
-693 -18 -54 -36 -67 -93 -67 -53 0 -110 49 -148 128 -22 47 -28 75 -31 154
-7 150 13 217 125 437 52 102 102 194 110 204 18 19 83 23 113 7z m-2675
-1975 c22 -4 67 -21 99 -38 95 -51 142 -134 117 -209 -14 -41 -72 -103 -120
-127 -46 -23 -139 -47 -148 -38 -3 4 5 20 20 35 37 39 59 98 59 162 1 82 -17
134 -62 182 -45 46 -47 51 -22 45 9 -3 35 -8 57 -12z m-199 -36 c-48 -56 -59
-97 -54 -188 5 -78 19 -118 58 -159 12 -13 20 -25 18 -28 -7 -6 -108 26 -137
44 -75 46 -111 99 -111 162 0 49 17 84 63 128 36 35 136 81 175 82 23 0 22 -2
-12 -41z m1984 37 c0 -3 -13 -19 -30 -37 -79 -87 -79 -255 1 -340 16 -18 28
-33 26 -36 -2 -2 -34 6 -72 18 -79 24 -157 89 -176 146 -30 92 44 191 179 239
37 13 72 18 72 10z m223 -16 c60 -23 130 -78 152 -121 19 -36 19 -102 1 -138
-17 -32 -73 -84 -114 -105 -59 -30 -193 -45 -140 -15 10 5 29 30 44 54 54 94
39 240 -33 310 l-36 35 36 0 c20 0 60 -9 90 -20z m-1089 -306 c9 -3 59 -69
113 -146 53 -77 106 -146 118 -154 11 -7 39 -16 60 -19 52 -6 75 -34 75 -92 0
-67 -31 -93 -111 -93 -36 0 -75 7 -96 17 -34 16 -117 106 -170 186 -14 20 -29
37 -33 37 -4 0 -19 -17 -32 -37 -44 -66 -118 -147 -160 -175 -34 -23 -52 -28
-105 -28 -55 0 -68 4 -88 25 -20 19 -25 34 -25 73 0 60 17 78 84 91 57 10 72
26 186 193 44 64 85 119 92 121 19 8 74 8 92 1z"/>
<path d="M1130 9411 c-340 -102 -544 -266 -581 -467 -34 -181 103 -365 345
-466 158 -65 303 -89 656 -108 569 -31 954 -31 1065 1 190 54 276 205 195 345
-73 128 -194 193 -509 274 -286 74 -348 98 -515 199 -188 114 -457 243 -515
248 -30 2 -79 -7 -141 -26z"/>
<path d="M5105 9354 c-550 -254 -547 -253 -925 -345 -236 -57 -301 -77 -377
-114 -204 -100 -290 -244 -223 -374 39 -78 115 -129 232 -156 99 -23 629 -25
838 -4 52 5 187 14 300 19 292 14 442 43 596 116 222 105 340 285 304 461 -32
154 -131 268 -311 356 -111 54 -240 97 -290 97 -13 -1 -78 -26 -144 -56z"/>
<path d="M1200 6430 l0 -1120 165 0 165 0 2 496 3 496 206 -494 207 -493 181
-3 181 -2 -6 27 c-4 16 -12 39 -20 53 -7 14 -116 264 -243 555 l-231 530 216
510 c118 281 220 518 225 527 19 37 13 38 -177 36 l-186 -3 -177 -463 -176
-462 -3 465 -2 465 -165 0 -165 0 0 -1120z"/>
<path d="M3005 7528 c-3 -13 -97 -504 -210 -1093 -113 -588 -208 -1082 -211
-1097 l-6 -28 161 0 c89 0 161 3 162 8 0 4 15 97 32 207 l32 200 212 3 213 2
5 -22 c3 -13 16 -95 30 -183 14 -88 28 -172 31 -187 l6 -28 160 0 160 0 -5 23
c-3 12 -99 508 -212 1102 -113 594 -208 1088 -211 1098 -5 15 -22 17 -174 17
-168 0 -170 0 -175 -22z m258 -1010 c43 -266 78 -486 77 -490 0 -5 -75 -8
-166 -8 l-166 0 5 26 c2 14 38 234 78 490 42 263 78 464 84 464 6 0 44 -209
88 -482z"/>
<path d="M4150 7399 l0 -150 290 3 c286 3 290 3 284 -17 -4 -11 -144 -402
-312 -870 l-305 -850 -5 -102 -4 -103 516 0 516 0 0 151 0 150 -315 -3 c-173
-2 -315 -2 -315 -1 0 1 120 334 266 740 380 1054 364 1004 364 1113 l0 90
-490 0 -490 0 0 -151z"/>
<path d="M1251 4291 c-15 -4 -27 -17 -31 -31 -12 -46 -20 -984 -10 -1269 30
-885 107 -1328 426 -2460 109 -387 161 -468 329 -516 169 -50 310 55 370 275
26 92 31 634 14 1355 -7 308 -13 585 -14 615 l0 55 256 3 257 2 6 -142 c35
-740 202 -1240 531 -1587 69 -73 201 -179 262 -210 100 -50 231 -33 303 39 22
22 42 40 44 40 2 0 9 -37 16 -82 20 -144 63 -245 129 -306 111 -103 309 -85
415 39 68 80 92 145 196 525 282 1027 353 1413 390 2109 16 300 10 1519 -8
1537 -11 11 -373 13 -1935 14 -1057 0 -1933 -2 -1946 -5z m2775 -2093 c-14
-256 -26 -764 -26 -1055 0 -167 -4 -303 -8 -303 -5 0 -17 10 -28 21 -10 12
-47 41 -80 65 -128 92 -247 242 -324 408 -106 228 -160 493 -184 904 l-5 82
330 0 331 0 -6 -122z"/>
</g>
</svg>

After

Width:  |  Height:  |  Size: 4.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 43 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 83 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

BIN
bin2/look/noel/kaz-tete.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 116 KiB

BIN
bin2/look/noel/kazdate.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

BIN
bin2/look/noel/kazmel.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.6 KiB

86
bin2/look/noel/logo.svg Normal file
View File

@@ -0,0 +1,86 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
version="1.0"
width="640.000000pt"
height="1280.000000pt"
viewBox="0 0 640.000000 1280.000000"
preserveAspectRatio="xMidYMid meet"
id="svg18"
sodipodi:docname="logo.svg"
xml:space="preserve"
inkscape:version="1.2.2 (b0a8486541, 2022-12-01)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg"><defs
id="defs22" /><sodipodi:namedview
id="namedview20"
pagecolor="#ffffff"
bordercolor="#666666"
borderopacity="1.0"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:document-units="pt"
showgrid="false"
inkscape:zoom="0.36440298"
inkscape:cx="428.09749"
inkscape:cy="939.89353"
inkscape:window-width="1920"
inkscape:window-height="1032"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="1"
inkscape:current-layer="svg18" /><g
transform="translate(0.000000,1280.000000) scale(0.100000,-0.100000)"
fill="#000000"
stroke="none"
id="g16"><path
d="M1450 12780 c-28 -28 -38 -56 -65 -190 -70 -337 -103 -1013 -111 -2260 l-6 -785 69 -32 c218 -103 428 -212 520 -270 133 -84 182 -103 396 -159 350 -91 440 -127 536 -211 65 -57 94 -102 118 -184 l19 -64 274 0 274 0 17 60 c31 102 63 147 162 221 77 57 183 96 406 150 386 92 459 117 766 259 262 121 388 175 408 175 23 0 24 44 15 915 -13 1328 -47 1950 -124 2250 -35 134 -58 158 -129 130 -78 -29 -169 -147 -453 -587 -216 -333 -330 -485 -385 -512 -32 -15 -76 -19 -265 -27 -269 -10 -1017 -10 -1288 0 -176 6 -194 8 -240 32 -64 33 -95 73 -419 559 -272 408 -340 497 -406 530 -48 25 -64 25 -89 0z m197 -402 c50 -55 184 -325 220 -445 22 -76 23 -250 0 -309 -41 -108 -100 -164 -173 -164 -50 0 -67 17 -88 90 -24 80 -72 370 -92 549 -20 188 -16 257 16 283 32 26 91 24 117 -4z m3353 12 c29 -16 43 -84 36 -170 -20 -223 -80 -593 -112 -693 -18 -54 -36 -67 -93 -67 -53 0 -110 49 -148 128 -22 47 -28 75 -31 154 -7 150 13 217 125 437 52 102 102 194 110 204 18 19 83 23 113 7z m-2675 -1975 c22 -4 67 -21 99 -38 95 -51 142 -134 117 -209 -14 -41 -72 -103 -120 -127 -46 -23 -139 -47 -148 -38 -3 4 5 20 20 35 37 39 59 98 59 162 1 82 -17 134 -62 182 -45 46 -47 51 -22 45 9 -3 35 -8 57 -12z m-199 -36 c-48 -56 -59 -97 -54 -188 5 -78 19 -118 58 -159 12 -13 20 -25 18 -28 -7 -6 -108 26 -137 44 -75 46 -111 99 -111 162 0 49 17 84 63 128 36 35 136 81 175 82 23 0 22 -2 -12 -41z m1984 37 c0 -3 -13 -19 -30 -37 -79 -87 -79 -255 1 -340 16 -18 28 -33 26 -36 -2 -2 -34 6 -72 18 -79 24 -157 89 -176 146 -30 92 44 191 179 239 37 13 72 18 72 10z m223 -16 c60 -23 130 -78 152 -121 19 -36 19 -102 1 -138 -17 -32 -73 -84 -114 -105 -59 -30 -193 -45 -140 -15 10 5 29 30 44 54 54 94 39 240 -33 310 l-36 35 36 0 c20 0 60 -9 90 -20z m-1089 -306 c9 -3 59 -69 113 -146 53 -77 106 -146 118 -154 11 -7 39 -16 60 -19 52 -6 75 -34 75 -92 0 -67 -31 -93 -111 -93 -36 0 -75 7 -96 17 -34 16 -117 106 -170 186 -14 20 -29 37 -33 37 -4 0 -19 -17 -32 -37 -44 -66 -118 -147 -160 -175 -34 -23 -52 -28 -105 -28 -55 0 -68 4 -88 25 -20 19 -25 34 -25 73 0 60 17 78 84 91 57 
10 72 26 186 193 44 64 85 119 92 121 19 8 74 8 92 1z"
id="path2" /><path
d="M1130 9411 c-340 -102 -544 -266 -581 -467 -34 -181 103 -365 345 -466 158 -65 303 -89 656 -108 569 -31 954 -31 1065 1 190 54 276 205 195 345 -73 128 -194 193 -509 274 -286 74 -348 98 -515 199 -188 114 -457 243 -515 248 -30 2 -79 -7 -141 -26z"
id="path4" /><path
d="M5105 9354 c-550 -254 -547 -253 -925 -345 -236 -57 -301 -77 -377 -114 -204 -100 -290 -244 -223 -374 39 -78 115 -129 232 -156 99 -23 629 -25 838 -4 52 5 187 14 300 19 292 14 442 43 596 116 222 105 340 285 304 461 -32 154 -131 268 -311 356 -111 54 -240 97 -290 97 -13 -1 -78 -26 -144 -56z"
id="path6" /><path
d="M1200 6430 l0 -1120 165 0 165 0 2 496 3 496 206 -494 207 -493 181 -3 181 -2 -6 27 c-4 16 -12 39 -20 53 -7 14 -116 264 -243 555 l-231 530 216 510 c118 281 220 518 225 527 19 37 13 38 -177 36 l-186 -3 -177 -463 -176 -462 -3 465 -2 465 -165 0 -165 0 0 -1120z"
id="path8" /><path
d="M3005 7528 c-3 -13 -97 -504 -210 -1093 -113 -588 -208 -1082 -211 -1097 l-6 -28 161 0 c89 0 161 3 162 8 0 4 15 97 32 207 l32 200 212 3 213 2 5 -22 c3 -13 16 -95 30 -183 14 -88 28 -172 31 -187 l6 -28 160 0 160 0 -5 23 c-3 12 -99 508 -212 1102 -113 594 -208 1088 -211 1098 -5 15 -22 17 -174 17 -168 0 -170 0 -175 -22z m258 -1010 c43 -266 78 -486 77 -490 0 -5 -75 -8 -166 -8 l-166 0 5 26 c2 14 38 234 78 490 42 263 78 464 84 464 6 0 44 -209 88 -482z"
id="path10" /><path
d="M4150 7399 l0 -150 290 3 c286 3 290 3 284 -17 -4 -11 -144 -402 -312 -870 l-305 -850 -5 -102 -4 -103 516 0 516 0 0 151 0 150 -315 -3 c-173 -2 -315 -2 -315 -1 0 1 120 334 266 740 380 1054 364 1004 364 1113 l0 90 -490 0 -490 0 0 -151z"
id="path12" /><path
d="M1251 4291 c-15 -4 -27 -17 -31 -31 -12 -46 -20 -984 -10 -1269 30 -885 107 -1328 426 -2460 109 -387 161 -468 329 -516 169 -50 310 55 370 275 26 92 31 634 14 1355 -7 308 -13 585 -14 615 l0 55 256 3 257 2 6 -142 c35 -740 202 -1240 531 -1587 69 -73 201 -179 262 -210 100 -50 231 -33 303 39 22 22 42 40 44 40 2 0 9 -37 16 -82 20 -144 63 -245 129 -306 111 -103 309 -85 415 39 68 80 92 145 196 525 282 1027 353 1413 390 2109 16 300 10 1519 -8 1537 -11 11 -373 13 -1935 14 -1057 0 -1933 -2 -1946 -5z m2775 -2093 c-14 -256 -26 -764 -26 -1055 0 -167 -4 -303 -8 -303 -5 0 -17 10 -28 21 -10 12 -47 41 -80 65 -128 92 -247 242 -324 408 -106 228 -160 493 -184 904 l-5 82 330 0 331 0 -6 -122z"
id="path14" /></g><image
width="205.84871"
height="157.69467"
preserveAspectRatio="none"
xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAaCAYAAACkVDyJAAAABHNCSVQICAgIfAhkiAAABS9JREFU
SIm1lVuIXWcVx39r7332Pmfm3DvNxM5kmlQT05SmtfGhtY2CsUwVCsXavETQF0PBUlosptDigC8q
PlkQFEQiFYQBA0pN6Rh7p2pCmoGWNkY604nJ3M51n9s++7p8mBmZmJNpKvYPH3zs72P9+K+19reE
T1CqavSS/g/S2Pvcbjf7/sKcYf2vwY4/e+DTd5yuHCpV+5/P1P2bvbQxETlWUVvBpT66XCvZSxeO
/2x2z7cfnwLBNi2GrGE+FvCR6UfMJ56ffWjnheqTpefe+UK6E4qoApAgdBWaItt9YPjDhJxRQtec
EiUJYRRdP/DEd/buueuHLx/fcaF5j4QxsulMgb4KTVFqJLgIPlBGieMYP4houm0qtTrm9cBePbLv
q/e+sPDitoX2bkkSZB2yAQ2ApkAVqGLgAh5CXCiRuvseao0mHy4tMbd4+aMdzhz57L0HZuZPZGte
evP3DVgCdBHqQFWglkDHMcLWrsKPLg0tPl/553u/7rXCg+1OV3vqnZKrCJv01FP7h49Nz58f+Vdn
fM3TlVKgh1BBuIyyDNQFlu4sPzb1du3nAN//w09ymahciIKWjBZSq1u6e+e+Hc+oIarCwBUKugo6
i+hJEf2ViP54JNOemtpnXyumca2D6cf275r4R/17qrrWaevp27z3FVoCDVlLZxNwUV26KXN1OtY1
sIYzh/d87XO/n/uFU+2VQhWQjSbZiLPWhcsizKsyJ8JFVVZRPOX87sXcNYFX1fDPD++dvOOl+ZPp
TmiAXnFDAU+go0oVYQ7hA5TLQAXoOLIc31fYe+pUw71uh+b71UNuNzC6CJYIBiCiJCoEKK6CC6wo
LIpSB/qAopj51KmXtoANBJ4rGvMrgC0JGYSMCmkEBzBUCUUwENIiFFWJBNIKRRHidjS7FWywQ1Uz
BEKgB6RQUgoOiiNCWtdcDwtEAomCLdAX6G0bvpWF9pbAK7r06C8PpMYWvEdvQCkBBSCDkBJBRQiA
LtAWaAMdlEAgRlGFUm7Ht154483Xf/OX6bGPBJ44+9qtD9YfnItWvNtc1l4PT5U+CZ4m9FRpozQl
oUFCk4Q24AE+Qmya7H/0u9aNheLBgj1yfGp6atC/KBbAc3/7bb6swzPFB74+3k2XaJ09w9zMDOp5
GKZJ7PtIEBKnLMwgJFUqIuUSQ9tGyeYL7Jy8n+zYBJ86+EXqLZehbvYrE4UDh4AXB9VQCu3yN+K8
jpvFLLuOfJP80aMcsoSw3cW0UwT9PtW5Dxj9zG4un5tl9PbbyI1uBxRUiOOYXt+n3e2hiSKJYoTG
4U1AWV93CRB+aXLSfPLYs7L9hjLlYp5iNksm45C2bUzDQGTwk6uqRHGC7wd0PI9Wu0Ot6bJcqfHX
c6f56dPH4MrBggVYhe0jrFSqJHFMFMWEfkQ+O0QmncGxLSzLwjAEYx2cqKJxTBjH9IOQrtfH7fZo
uC6r9QarjQZ/f/WV/9Ttv1PK0sVL9IOQxZVV3E6HciFPMZcjOzxExrGxbRvLNNeBuuYsiugHIT3P
p93r0Wh1aLZcup7Pe+dnefftswOzIoAapsHd93+ZyYcOUxougIBlmTi2Q8Z2cByHlGVhigECcZIQ
hCF938cPAvpBQBTFxCizZ97iT9O/o7Y8eBJtDG8A7LTD2C07GR0bZ+/+OxmfuBk7ZZPPlYg1wTRM
ZL0iqoqKUKuskLJtXj75R2qVVd49fWYgaCBwkAzTYCiXxet0sTNpRiZuonpxEUMMDMuk03TRZMsQ
Hw/4/9Y1B/AnpX8DQ2J16tSILa8AAAAASUVORK5CYII=
"
id="image136"
x="239.88551"
y="10.151023" /></svg>

After

Width:  |  Height:  |  Size: 7.7 KiB

191
bin2/manageAgora.sh Executable file
View File

@@ -0,0 +1,191 @@
#!/bin/bash
# Management script for a Mattermost instance ("agora"):
# init / versions / restart ...
#
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
# GLOBAL VARS
PRG=$(basename $0)
# Known orgas come from the shared list; "-orga" suffix stripped for display.
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
AVAILABLE_ORGAS=${availableOrga[*]//-orga/}
QUIET="1"                    # fd 1 (stdout) by default; -q redirects messages to /dev/null
ONNAS=                       # set by --nas: orga volumes live on the NAS
AGORACOMMUN="OUI_PAR_DEFAUT" # non-empty = act on the shared Mattermost; cleared when an ORGA is given
DockerServName=${mattermostServName}
declare -A Posts             # NOTE(review): declared but never used in this script
usage() {
    # Print the command-line help on stdout.
    cat <<EOF
${PRG} [OPTION] [COMMANDES] [ORGA]
Manipulation d'un mattermost
OPTIONS
    -h|--help                   Cette aide :-)
    -n|--simu                   SIMULATION
    -q|--quiet                  On ne parle pas (utile avec le -n pour avoir que les commandes)
    --nas                       L'orga se trouve sur le NAS !
COMMANDES (on peut en mettre plusieurs dans l'ordre souhaité)
    -I|--install                L'initialisation du mattermost
    -v|--version                Donne la version du mattermost et signale les MàJ
    -mmctl "command"            Envoie une commande via mmctl        ** SPECIFIQUES **
    -p|--post "team" "message"  Poste un message dans une team agora ** AGORA **
ORGA parmi : ${AVAILABLE_ORGAS}
    ou vide si mattermost commun

EOF
}
Init(){
    # First-time configuration of the Mattermost container:
    # patch config.json, restart the container, then create the admin
    # account and the initial team through the REST API.
    NOM=$ORGA
    if [ -n "$AGORACOMMUN" ] ; then NOM="KAZ" ; fi
    # Pick config.json: shared instance, NAS-hosted orga, or local orga volume.
    CONF_FILE="${DOCK_VOL}/orga_${ORGA}-matterConfig/_data/config.json"
    if [ -n "${AGORACOMMUN}" ]; then
        CONF_FILE="${DOCK_VOL}/mattermost_matterConfig/_data/config.json"
    elif [ -n "${ONNAS}" ]; then
        CONF_FILE="${NAS_VOL}/orga_${ORGA}-matterConfig/_data/config.json"
    fi
    # In-place patch of the Mattermost JSON config (URL, port, SMTP, locale).
    ${SIMU} sed -i \
        -e 's|"SiteURL": ".*"|"SiteURL": "'${MATTER_URL}'"|g' \
        -e 's|"ListenAddress": ".*"|"ListenAddress": ":'${matterPort}'"|g' \
        -e 's|"WebsocketURL": ".*"|"WebsocketURL": "wss://'${MATTER_URI}'"|g' \
        -e 's|"AllowCorsFrom": ".*"|"AllowCorsFrom": "'${domain}' '${MATTER_URI}':443 '${MATTER_URI}'"|g' \
        -e 's|"ConsoleLevel": ".*"|"ConsoleLevel": "ERROR"|g' \
        -e 's|"SendEmailNotifications": false|"SendEmailNotifications": true|g' \
        -e 's|"FeedbackEmail": ".*"|"FeedbackEmail": "admin@'${domain}'"|g' \
        -e 's|"FeedbackOrganization": ".*"|"FeedbackOrganization": "Cochez la KAZ du libre !"|g' \
        -e 's|"ReplyToAddress": ".*"|"ReplyToAddress": "admin@'${domain}'"|g' \
        -e 's|"SMTPServer": ".*"|"SMTPServer": "mail.'${domain}'"|g' \
        -e 's|"SMTPPort": ".*"|"SMTPPort": "25"|g' \
        -e 's|"DefaultServerLocale": ".*"|"DefaultServerLocale": "fr"|g' \
        -e 's|"DefaultClientLocale": ".*"|"DefaultClientLocale": "fr"|g' \
        -e 's|"AvailableLocales": ".*"|"AvailableLocales": "fr"|g' \
        ${CONF_FILE}
    # Restart so the new settings (port change among others) are applied.
    ${SIMU} docker restart "${DockerServName}"
    [ $? -ne 0 ] && printKazError "$DockerServName est down : impossible de terminer l'install" && return 1 >& $QUIET
    ${SIMU} waitUrl "$MATTER_URL" 300
    [ $? -ne 0 ] && printKazError "$DockerServName ne parvient pas à démarrer correctement : impossible de terminer l'install" && return 1 >& $QUIET
    # Create the admin account.
    _getPasswords
    ${SIMU} curl -i -d "{\"email\":\"${MM_ADMIN_EMAIL}\",\"username\":\"${mattermost_user}\",\"password\":\"${mattermost_pass}\",\"allow_marketing\":true}" "${MATTER_URL}/api/v4/users"
    MM_TOKEN=$(_getMMToken ${MATTER_URL})
    # Create the team (name is the lower-cased display name, open type).
    ${SIMU} curl -i -H "Authorization: Bearer ${MM_TOKEN}" -d "{\"display_name\":\"${NOM}\",\"name\":\"${NOM,,}\",\"type\":\"O\"}" "${MATTER_URL}/api/v4/teams"
}
Version(){
    # Report the Mattermost version as printed by mmctl inside the container.
    local ver
    ver=$(docker exec "$DockerServName" bin/mmctl version | grep -i version:)
    echo "Version $DockerServName : ${GREEN}${ver}${NC}"
}
_getMMToken(){
    # $1 MATTER_URL
    # Log in with the admin credentials and print the session token parsed
    # from the response headers (curl -i).
    # NOTE(review): under simulation (${SIMU}="echo") the curl command is
    # only echoed, so the token captured by callers is empty — TODO confirm
    # this is acceptable in -n mode.
    _getPasswords
    ${SIMU} curl -i -s -d "{\"login_id\":\"${mattermost_user}\",\"password\":\"${mattermost_pass}\"}" "${1}/api/v4/users/login" | grep 'token' | sed 's/token:\s*\(.*\)\s*/\1/' | tr -d '\r'
}
PostMessage(){
    # Post ${MESSAGE} to the "team:channel" destination stored in ${TEAM},
    # using mmctl after an explicit admin login.
    printKazMsg "Envoi à $TEAM : $MESSAGE" >& $QUIET
    _getPasswords
    ${SIMU} docker exec -ti "${DockerServName}" bin/mmctl auth login "${MATTER_URL}" --name local-server --username ${mattermost_user} --password ${mattermost_pass}
    ${SIMU} docker exec -ti "${DockerServName}" bin/mmctl post create "${TEAM}" --message "${MESSAGE}"
}
MmctlCommand(){
    # $1 command
    # Run an arbitrary mmctl command inside the container as uid 33 (www-data).
    # $1 is intentionally unquoted so a multi-word command is word-split
    # into separate mmctl arguments.
    ${SIMU} docker exec -u 33 "$DockerServName" bin/mmctl $1
}
_getPasswords(){
    # Source the admin account credentials: shared instance or per-orga files.
    # Presumably sets mattermost_user / mattermost_pass / MM_ADMIN_EMAIL —
    # TODO confirm against the env-mattermost* secret files.
    if [ -n "$AGORACOMMUN" ] ; then
        . $KAZ_KEY_DIR/env-mattermostAdmin
        . $KAZ_KEY_DIR/env-mattermostServ
    else
        . $KAZ_KEY_DIR/orgas/${ORGA}/env-mattermostAdmin
        . $KAZ_KEY_DIR/orgas/$ORGA/env-mattermostServ
    fi
}
########## Main #################
# Argument parsing: -mmctl and --post consume the following argument(s);
# anything that is not a known option is taken as the ORGA name.
for ARG in "$@"; do
    if [ -n "${GETMMCTLCOMAND}" ]; then # right after -mmctl
        MMCTLCOMAND="${ARG}"
        GETMMCTLCOMAND=
    elif [ -n "${GETTEAM}" ]; then # right after --post
        GETMESSAGE="now"
        GETTEAM=""
        TEAM="${ARG}"
    elif [ -n "${GETMESSAGE}" ]; then # after --post "team:channel"
        # BUG FIX: these two guards compared against the quoted literal "-*",
        # which never matches; use an unquoted glob so a short dash-prefixed
        # "team" or "message" (a mistyped option) is caught as intended.
        if [[ $TEAM == -* && ${#TEAM} -le 5 ]]; then echo "J'envoie mon message à \"${TEAM}\" ?? Arf, ça me plait pas j'ai l'impression que tu t'es planté sur la commande."; usage ; exit 1 ; fi
        if [[ $ARG == -* && ${#ARG} -le 5 ]]; then echo "J'envoie le message \"${ARG}\" ?? Arf, ça me plait pas j'ai l'impression que tu t'es planté sur la commande."; usage ; exit 1 ; fi
        if [[ ! $TEAM =~ .*:.+ ]]; then echo "Il faut mettre un destinataire sous la forme team:channel. Recommence !"; usage ; exit 1 ; fi
        MESSAGE="$ARG"
        GETMESSAGE=""
    else
        case "${ARG}" in
            '-h' | '--help' )
                usage && exit ;;
            '-n' | '--simu')
                SIMU="echo" ;;
            '-q' )
                QUIET="/dev/null" ;;
            '--nas' | '-nas' )
                ONNAS="SURNAS" ;;
            '-v' | '--version')
                COMMANDS="$(echo "${COMMANDS} VERSION" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-I' | '--install' )
                COMMANDS="$(echo "${COMMANDS} INIT" | sed "s/\s/\n/g" | sort | uniq)" ;; # sort|uniq keeps each command once
            '--mmctl' | '-mmctl' )
                COMMANDS="$(echo "${COMMANDS} RUN-AGORA-MMCTL" | sed "s/\s/\n/g" | sort | uniq)"
                GETMMCTLCOMAND="now" ;;
            '-p' | '--post' )
                COMMANDS="$(echo "${COMMANDS} POST-AGORA" | sed "s/\s/\n/g" | sort | uniq)"
                GETTEAM="now" ;;
            -* ) # unknown option: ignore
                 # BUG FIX: was the quoted pattern '-*', which only matched the
                 # literal two-character string "-*"; every other unknown option
                 # fell through to *) and was silently taken for an ORGA name.
                ;;
            *)
                ORGA="${ARG%-orga}"
                DockerServName="${ORGA}-${mattermostServName}"
                AGORACOMMUN=
                ;;
        esac
    fi
done
if [ -z "${COMMANDS}" ]; then usage && exit ; fi
# Build the target URL: shared instance or per-orga virtual host.
MATTER_URI="${ORGA}-${matterHost}.${domain}"
if [ -n "$AGORACOMMUN" ]; then MATTER_URI="${matterHost}.${domain}" ; fi
MATTER_URL="${httpProto}://${MATTER_URI}"
# Execute the requested commands in their canonical (sorted) order.
for COMMAND in ${COMMANDS}; do
    case "${COMMAND}" in
        'VERSION' )
            Version && exit ;;
        'INIT' )
            Init ;;
        'RUN-AGORA-MMCTL' )
            MmctlCommand "$MMCTLCOMAND" ;;
        'POST-AGORA' )
            PostMessage ;;
    esac
done

123
bin2/manageCastopod.sh Executable file
View File

@@ -0,0 +1,123 @@
#!/bin/bash
# Management script for a Castopod instance:
# init / versions / restart ...
# (NOTE(review): the original header said "wordpress" — copy/paste residue.)
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
# GLOBAL VARS
PRG=$(basename $0)
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
AVAILABLE_ORGAS=${availableOrga[*]//-orga/}
QUIET="1"                       # fd 1 (stdout) by default; -q redirects to /dev/null
ONNAS=                          # set by --nas
CASTOPOD_COMMUN="OUI_PAR_DEFAUT" # non-empty = shared Castopod; cleared when an ORGA is given
DockerServName=${castopodServName}
declare -A Posts                # NOTE(review): declared but never used in this script
usage() {
    # Print the command-line help on stdout.
    cat <<EOF
${PRG} [OPTION] [COMMANDES] [ORGA]
Manipulation d'un castopod
OPTIONS
    -h|--help     Cette aide :-)
    -n|--simu     SIMULATION
    -q|--quiet    On ne parle pas (utile avec le -n pour avoir que les commandes)
    --nas         L'orga se trouve sur le NAS !
COMMANDES (on peut en mettre plusieurs dans l'ordre souhaité)
    -I|--install  L'initialisation du castopod
    -v|--version  Donne la version du castopod et signale les MàJ
ORGA parmi : ${AVAILABLE_ORGAS}
    ou vide si castopod commun

EOF
}
Init(){
    # First-time Castopod setup: fetch the install page to obtain a CSRF
    # token, then create the super-admin account through /cp-install.
    POD_URL="${httpProto}://${ORGA}-${castopodHost}.${domain}"
    [ -n "${CASTOPOD_COMMUN}" ] && POD_URL="${httpProto}://${castopodHost}.${domain}"
    if ! [[ "$(docker ps -f name=${DockerServName} | grep -w ${DockerServName})" ]]; then
        printKazError "Castopod not running... abort"
        exit
    fi
    echo "\n *** Premier lancement de Castopod" >& $QUIET
    ${SIMU} waitUrl "${POD_URL}"
    # NOTE(review): ${headers} is never assigned in this script, so
    # CI_SESSION is always empty — and it is never used afterwards.
    CI_SESSION=$(echo ${headers} | grep "ci_session" | sed "s/.*ci_session=//")
    cookies=$(curl -c - ${POD_URL})
    # Scrape the hidden csrf_test_name value from the install form.
    CSRF_TOKEN=$(curl --cookie <(echo "$cookies") ${POD_URL}/cp-install | grep "csrf_test_name" | sed "s/.*value=.//" | sed "s/.>//")
    _getPasswords
    #echo ${CSRF_TOKEN}
    ${SIMU} curl --cookie <(echo "$cookies") -X POST \
        -d "username=${ADMIN_USER}" \
        -d "password=${ADMIN_PASSWORD}" \
        -d "email=${ADMIN_MAIL}" \
        -d "csrf_test_name=${CSRF_TOKEN}" \
        "${POD_URL}/cp-install/create-superadmin"
}
Version(){
    # Placeholder: no version probe is implemented for Castopod yet.
    local ver="TODO"
    echo "Version $DockerServName : ${GREEN}${ver}${NC}"
}
_getPasswords(){
    # Source the admin credentials: shared instance or per-orga file.
    # Presumably sets ADMIN_USER / ADMIN_PASSWORD / ADMIN_MAIL —
    # TODO confirm against env-castopodAdmin.
    if [ -n "$CASTOPOD_COMMUN" ]; then
        . $KAZ_KEY_DIR/env-castopodAdmin
    else
        . $KAZ_KEY_DIR/orgas/$ORGA/env-castopodAdmin
    fi
}
########## Main #################
# Argument parsing: anything that is not a known option is the ORGA name.
for ARG in "$@"; do
    case "${ARG}" in
        '-h' | '--help' )
            usage && exit ;;
        '-n' | '--simu')
            SIMU="echo" ;;
        '-q' )
            QUIET="/dev/null" ;;
        '--nas' | '-nas' )
            ONNAS="SURNAS" ;;
        '-v' | '--version')
            COMMANDS="$(echo "${COMMANDS} VERSION" | sed "s/\s/\n/g" | sort | uniq)" ;;
        '-I' | '--install' )
            COMMANDS="$(echo "${COMMANDS} INIT" | sed "s/\s/\n/g" | sort | uniq)" ;; # sort|uniq keeps each command once
        -* ) # unknown option: ignore
             # BUG FIX: was the quoted pattern '-*', which only matched the
             # literal two-character string "-*"; any other unknown option fell
             # through to *) and was silently taken for an ORGA name.
            ;;
        *)
            ORGA="${ARG%-orga}"
            DockerServName="${ORGA}-${castopodServName}"
            CASTOPOD_COMMUN=
            ;;
    esac
done
if [ -z "${COMMANDS}" ]; then usage && exit ; fi
# Execute the requested commands in their canonical (sorted) order.
for COMMAND in ${COMMANDS}; do
    case "${COMMAND}" in
        'VERSION' )
            Version && exit ;;
        'INIT' )
            Init ;;
    esac
done

408
bin2/manageCloud.sh Executable file
View File

@@ -0,0 +1,408 @@
#!/bin/bash
# Management script for a Nextcloud instance ("cloud"):
# init / versions / restart ...
#
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
# GLOBAL VARS
PRG=$(basename $0)
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
AVAILABLE_ORGAS=${availableOrga[*]//-orga/}
# CLOUD
# Default Nextcloud apps installed by -I when no -a list is given.
APPLIS_PAR_DEFAUT="tasks calendar contacts bookmarks mail richdocuments external drawio ransomware_protection" #rainloop richdocumentscode
QUIET="1"                    # fd 1 (stdout) by default; -q redirects to /dev/null
ONNAS=                       # set by --nas
CLOUDCOMMUN="OUI_PAR_DEFAUT" # non-empty = shared cloud; cleared when an ORGA is given
DockerServName=${nextcloudServName}
usage() {
    # Print the command-line help on stdout.
    cat <<EOF
${PRG} [OPTION] [COMMANDES] [ORGA]
Manipulation d'un cloud
OPTIONS
    -h|--help            Cette aide :-)
    -n|--simu            SIMULATION
    -q|--quiet           On ne parle pas (utile avec le -n pour avoir que les commandes)
    --nas                L'orga se trouve sur le NAS !
COMMANDES (on peut en mettre plusieurs dans l'ordre souhaité)
    -I|--install         L'initialisation du cloud
    -v|--version         Donne la version du cloud et signale les MàJ
    --optim              Lance la procédure Nextcloud pour optimiser les performances **                **
    -occ "command"       Envoie une commande via occ                                  **                **
    -u                   Mets à jour les applis                                       ** SPECIFIQUES **
    -i                   Install des applis                                           **     CLOUD    **
    -a "app1 app2 ..."   Choix des appli à installer ou mettre à jour (entre guillemets) **             **
    -U|--upgrade         Upgrade des clouds                                           **                **
    -O|--officeURL       MAJ le office de ce nextcloud                                **                **
ORGA parmi : ${AVAILABLE_ORGAS}
    ou vide si cloud commun

EOF
}
##################################
############### CLOUD ############
##################################
Init(){
    # Full first-time setup of a Nextcloud: web installer, php config
    # tuning, default applications, Collabora (richdocuments) URLs and,
    # for the shared cloud only, the LDAP backend.
    NOM=$ORGA
    [ -n "${CLOUDCOMMUN}" ] && NOM="commun"
    if [ -z "${LISTE_APPS}" ]; then
        printKazMsg "Aucune appli n'est précisée, j'installerais les applis par défaut : ${APPLIS_PAR_DEFAUT}" >& $QUIET
        LISTE_APPS="${APPLIS_PAR_DEFAUT}"
    fi
    checkDockerRunning "$DockerServName" "$NOM"
    [ $? -ne 0 ] && echo "${CYAN}\n $DockerServName est down : impossible de terminer l'install${NC}" && return 1 >& $QUIET
    # Config location: shared cloud, NAS-hosted orga, or local orga volume.
    CONF_FILE="${DOCK_VOL}/orga_${ORGA}-cloudConfig/_data/config.php"
    CLOUD_URL="https://${ORGA}-${cloudHost}.${domain}"
    if [ -n "$CLOUDCOMMUN" ]; then
        CONF_FILE="${DOCK_VOL}/cloud-cloudConfig/_data/config.php"
        CLOUD_URL="https://${cloudHost}.${domain}"
    elif [ -n "${ONNAS}" ]; then
        CONF_FILE="${NAS_VOL}/orga_${ORGA}-cloudConfig/_data/config.php"
    fi
    firstInstall "$CLOUD_URL" "$CONF_FILE" "$NOM"
    updatePhpConf "$CONF_FILE"
    InstallApplis
    echo "${CYAN} *** Paramétrage richdocuments pour $ORGA${NC}" >& $QUIET
    setOfficeUrl
    # Allow the private network ranges for WOPI and pin the public URL.
    occCommand "config:app:set --value 10.0.0.0/8,172.16.0.0/12,192.168.0.0/16 richdocuments wopi_allowlist"
    occCommand "config:system:set overwrite.cli.url --value=$CLOUD_URL"
    occCommand "config:system:set disable_certificate_verification --value=true"
    if [ -n "$CLOUDCOMMUN" ]; then initLdap "$NOM" ; fi
}
Version(){
    # Print the Nextcloud version (occ status) and flag an available
    # update (occ update:check) if any.
    VERSION=$(docker exec -u 33 ${DockerServName} /var/www/html/occ status | grep -i version:)
    VERSION_UPDATE=$(docker exec -u 33 ${DockerServName} /var/www/html/occ update:check | grep -i "available\." | cut -c 1-17)
    versionSTR="Version ${DockerServName} : ${GREEN}${VERSION}${NC}"
    [ -n "${VERSION_UPDATE}" ] && versionSTR="$versionSTR -- Disponible : ${RED} ${VERSION_UPDATE} ${NC}"
    echo "$versionSTR"
}
firstInstall(){
    # $1 CLOUD_URL
    # $2 phpConfFile
    # $3 orga (display name, messages only)
    # Run the Nextcloud web installer once; skipped when config.php already
    # contains "'installed' => true," (i.e. the instance is initialized).
    if ! grep -q "'installed' => true," "$2" 2> /dev/null; then
        printKazMsg "\n *** Premier lancement nextcloud $3" >& $QUIET
        _getPasswords
        ${SIMU} waitUrl "$1"
        ${SIMU} curl -X POST \
            -d "install=true" \
            -d "adminlogin=${NEXTCLOUD_ADMIN_USER}" \
            -d "adminpass=${NEXTCLOUD_ADMIN_PASSWORD}" \
            -d "directory=/var/www/html/data" \
            -d "dbtype=mysql" \
            -d "dbuser=${MYSQL_USER}" \
            -d "dbpass=${MYSQL_PASSWORD}" \
            -d "dbname=${MYSQL_DATABASE}" \
            -d "dbhost=${MYSQL_HOST}" \
            -d "install-recommended-apps=true" \
            "$1"
    fi
}
_getPasswords(){
    # Source the Nextcloud admin and DB credentials: shared or per-orga.
    # Presumably sets NEXTCLOUD_ADMIN_* and MYSQL_* — TODO confirm against
    # the env-nextcloud* secret files.
    if [ -n "$CLOUDCOMMUN" ]; then
        . $KAZ_KEY_DIR/env-nextcloudServ
        . $KAZ_KEY_DIR/env-nextcloudDB
    else
        . $KAZ_KEY_DIR/orgas/$ORGA/env-nextcloudServ
        . $KAZ_KEY_DIR/orgas/$ORGA/env-nextcloudDB
    fi
}
setOfficeUrl(){
    # Point richdocuments (Collabora) at this site's office server.
    # Since 25 March the office servers are all normalized to
    # https://${site}-${officeHost}.${domain} on every server.
    #OFFICE_URL="https://${officeHost}.${domain}"
    #if [ ! "${site}" = "prod1" ]; then
    OFFICE_URL="https://${site}-${officeHost}.${domain}"
    #fi
    occCommand "config:app:set --value $OFFICE_URL richdocuments public_wopi_url"
    occCommand "config:app:set --value $OFFICE_URL richdocuments wopi_url"
    occCommand "config:app:set --value $OFFICE_URL richdocuments disable_certificate_verification"
}
initLdap(){
    . $KAZ_KEY_DIR/env-ldapServ
    # $1 cloud display name (messages only)
    # Configure the user_ldap app from scratch: drop config s01 and rebuild
    # it so Nextcloud authenticates against the KAZ LDAP directory.
    # NOTE(review): the second argument "${DockerServName}" passed to
    # occCommand below is ignored (occCommand only uses $1) — harmless.
    echo "${CYAN} *** Installation LDAP pour $1${NC}" >& $QUIET
    occCommand "app:enable user_ldap" "${DockerServName}"
    occCommand "ldap:delete-config s01" "${DockerServName}"
    occCommand "ldap:create-empty-config" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapAgentName cn=cloud,ou=applications,${ldap_root}" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapAgentPassword ${LDAP_CLOUD_PASSWORD}" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapBase ${ldap_root}" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapBaseGroups ${ldap_root}" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapBaseUsers ou=users,${ldap_root}" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapExpertUsernameAttr identifiantKaz" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapHost ${ldapServName}" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapPort 389" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapTLS 0" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapLoginFilter \"(&(objectclass=nextcloudAccount)(|(cn=%uid)(identifiantKaz=%uid)))\"" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapQuotaAttribute nextcloudQuota" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapUserFilter \"(&(objectclass=nextcloudAccount)(nextcloudEnabled=TRUE))\"" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapUserFilterObjectclass nextcloudAccount" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapEmailAttribute mail" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapUserDisplayName cn" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapUserFilterMode 1" "${DockerServName}"
    occCommand "ldap:set-config s01 ldapConfigurationActive 1" "${DockerServName}"
    # In mariadb, to let LDAP take over: delete from oc_users where uid<>'admin';
    # docker exec -i nextcloudDB mysql --user=<user> --password=<password> <db> <<< "delete from oc_users where uid<>'admin';"
    # Doc : https://help.nextcloud.com/t/migration-to-ldap-keeping-users-and-data/13205
    # Example table/keys:
    # +-------------------------------+----------------------------------------------------------+
    # | Configuration                 | s01                                                      |
    # +-------------------------------+----------------------------------------------------------+
    # | hasMemberOfFilterSupport      | 0                                                        |
    # | homeFolderNamingRule          |                                                          |
    # | lastJpegPhotoLookup           | 0                                                        |
    # | ldapAgentName                 | cn=cloud,ou=applications,dc=kaz,dc=sns                   |
    # | ldapAgentPassword             | ***                                                      |
    # | ldapAttributesForGroupSearch  |                                                          |
    # | ldapAttributesForUserSearch   |                                                          |
    # | ldapBackgroundHost            |                                                          |
    # | ldapBackgroundPort            |                                                          |
    # | ldapBackupHost                |                                                          |
    # | ldapBackupPort                |                                                          |
    # | ldapBase                      | ou=users,dc=kaz,dc=sns                                   |
    # | ldapBaseGroups                | ou=users,dc=kaz,dc=sns                                   |
    # | ldapBaseUsers                 | ou=users,dc=kaz,dc=sns                                   |
    # | ldapCacheTTL                  | 600                                                      |
    # | ldapConfigurationActive       | 1                                                        |
    # | ldapConnectionTimeout         | 15                                                       |
    # | ldapDefaultPPolicyDN          |                                                          |
    # | ldapDynamicGroupMemberURL     |                                                          |
    # | ldapEmailAttribute            | mail                                                     |
    # | ldapExperiencedAdmin          | 0                                                        |
    # | ldapExpertUUIDGroupAttr       |                                                          |
    # | ldapExpertUUIDUserAttr        |                                                          |
    # | ldapExpertUsernameAttr        | uid                                                      |
    # | ldapExtStorageHomeAttribute   |                                                          |
    # | ldapGidNumber                 | gidNumber                                                |
    # | ldapGroupDisplayName          | cn                                                       |
    # | ldapGroupFilter               |                                                          |
    # | ldapGroupFilterGroups         |                                                          |
    # | ldapGroupFilterMode           | 0                                                        |
    # | ldapGroupFilterObjectclass    |                                                          |
    # | ldapGroupMemberAssocAttr      |                                                          |
    # | ldapHost                      | ldap                                                     |
    # | ldapIgnoreNamingRules         |                                                          |
    # | ldapLoginFilter               | (&(|(objectclass=nextcloudAccount))(cn=%uid))            |
    # | ldapLoginFilterAttributes     |                                                          |
    # | ldapLoginFilterEmail          | 0                                                        |
    # | ldapLoginFilterMode           | 0                                                        |
    # | ldapLoginFilterUsername       | 1                                                        |
    # | ldapMatchingRuleInChainState  | unknown                                                  |
    # | ldapNestedGroups              | 0                                                        |
    # | ldapOverrideMainServer        |                                                          |
    # | ldapPagingSize                | 500                                                      |
    # | ldapPort                      | 389                                                      |
    # | ldapQuotaAttribute            | nextcloudQuota                                           |
    # | ldapQuotaDefault              |                                                          |
    # | ldapTLS                       | 0                                                        |
    # | ldapUserAvatarRule            | default                                                  |
    # | ldapUserDisplayName           | cn                                                       |
    # | ldapUserDisplayName2          |                                                          |
    # | ldapUserFilter                | (&(objectclass=nextcloudAccount)(nextcloudEnabled=TRUE)) |
    # | ldapUserFilterGroups          |                                                          |
    # | ldapUserFilterMode            | 1                                                        |
    # | ldapUserFilterObjectclass     | nextcloudAccount                                         |
    # | ldapUuidGroupAttribute        | auto                                                     |
    # | ldapUuidUserAttribute         | auto                                                     |
    # | turnOffCertCheck              | 0                                                        |
    # | turnOnPasswordChange          | 0                                                        |
    # | useMemberOfToDetectMembership | 1                                                        |
    # +-------------------------------+----------------------------------------------------------+
}
updatePhpConf(){
    # $1 php_conf_file
    # Inject KAZ's standard settings into Nextcloud's config.php.
    # Each _addVarAfterInConf call is idempotent: the key is only added
    # when it is not already present in the file.
    if [ $# -ne 1 ]; then
        echo "${RED}#Je ne sais pas ou écrire la conf php !${NC}"
        return 1
    fi
    echo "${CYAN} *** Maj de la conf $1${NC}" >& $QUIET
    PHPCONF="$1"
    _addVarAfterInConf "default_language" " 'default_language' => 'fr'," "CONFIG = array (" "${PHPCONF}"
    _addVarAfterInConf "theme" " 'theme' => ''," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "default_phone_region" " 'default_phone_region' => 'FR'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "loglevel" " 'loglevel' => 2," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "maintenance" " 'maintenance' => false," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "app_install_overwrite" " 'app_install_overwrite' => \n array (\n 0 => 'documents',\n )," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "overwriteprotocol" " 'overwriteprotocol' => 'https'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "mail_domain" " 'mail_domain' => '${domain}'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "mail_from_address" " 'mail_from_address' => 'admin'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "mail_smtpport" " 'mail_smtpport' => '25'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "mail_sendmailmode" " 'mail_sendmailmode' => 'smtp'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "mail_smtphost" " 'mail_smtphost' => '${smtpHost}.${domain}'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "mail_smtpmode" " 'mail_smtpmode' => 'smtp'," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "enable_previews" " 'enable_previews' => true," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "trashbin_retention_obligation" " 'trashbin_retention_obligation' => 'auto, 30'," "'installed' => true," "${PHPCONF}"
    # Remove the "get a free account" message from the footer.
    _addVarAfterInConf "simpleSignUpLink.shown" " 'simpleSignUpLink.shown' => false," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "trusted_proxies" " 'trusted_proxies' => array( 0 => '10.0.0.0/8', 1 => '172.16.0.0/12', 2 => '192.168.0.0/16' )," "'installed' => true," "${PHPCONF}"
    _addVarAfterInConf "maintenance_window_start" " 'maintenance_window_start' => 1," "'installed' => true," "${PHPCONF}"
}
UpgradeClouds() {
    # Run the Nextcloud core upgrade (occ upgrade) on the selected container.
    local sep="--------------------------------------------------------"
    echo "${NC}${sep}" >& $QUIET
    echo "UPGRADE des cloud" >& $QUIET
    echo "${sep}" >& $QUIET
    occCommand "upgrade"
}
OptimiseClouds() {
    # Nextcloud performance maintenance (missing DB indices, bigint filecache).
    # BUG FIX: occCommand only honors its first argument, so
    # "db:convert-filecache-bigint --no-interaction", previously passed as a
    # second argument, was silently dropped and never executed.
    # Run each occ command separately.
    occCommand "db:add-missing-indices"
    occCommand "db:convert-filecache-bigint --no-interaction"
}
UpdateApplis() {
    # Update Nextcloud applications on ${DockerServName}.
    # Without an explicit -a list, let occ update everything in one go.
    printKazMsg "UPDATE DES APPLIS du cloud ${DockerServName} : ${LISTE_APPS}" >& $QUIET
    if [ -z "${LISTE_APPS}" ]; then
        occCommand "app:update --all"
        return
    fi
    echo "Mise à jour de ${LISTE_APPS}" >& $QUIET
    local app
    for app in ${LISTE_APPS}; do
        occCommand "app:update ${app}"
    done
}
InstallApplis(){
    # Install the applications listed in ${LISTE_APPS} (default list when
    # empty), skipping any app already enabled in the instance.
    if [ -z "${LISTE_APPS}" ]; then
        printKazMsg "Aucune appli n'est précisée, j'installe les applis par défaut : ${APPLIS_PAR_DEFAUT}" >& $QUIET
        LISTE_APPS="${APPLIS_PAR_DEFAUT}"
    fi
    apps=$LISTE_APPS
    if ! [[ "$(docker ps -f name=${DockerServName} | grep -w ${DockerServName})" ]]; then
        printKazError "${RED}# ${DockerServName} not running... impossible d'installer les applis${NC}" >& $QUIET
        return 1
    fi
    # Extract the "Enabled:" section of `occ app:list` (lines between
    # "Enabled:" and "Disabled:").
    LIST_ALL=$(docker exec -ti -u 33 "${DockerServName}" /var/www/html/occ app:list |
        awk 'BEGIN {cp=0}
            /Enabled:/ {cp=1 ; next};
            /Disabled:/ {cp=0; next};
            {if (cp) print $0};')
    for app in $apps
    do
        # Skip apps that are already enabled.
        grep -wq "${app}" <<<"${LIST_ALL}" 2>/dev/null && echo "${app} dejà installée" >& $QUIET && continue
        echo " install ${app}" >& $QUIET
        occCommand "app:install ${app}"
    done
}
occCommand(){
    # $1 Command
    # Run an occ command in the Nextcloud container as uid 33 (www-data).
    # $1 is intentionally unquoted so a multi-word command is word-split
    # into separate occ arguments. Arguments beyond $1 are ignored.
    ${SIMU} docker exec -u 33 $DockerServName /var/www/html/occ $1
}
_addVarAfterInConf(){
    # $1 key (grep pattern)   $2 line to insert
    # $3 anchor pattern       $4 conf file
    # Idempotently insert $2 right after the first line matching $3,
    # but only when $1 does not already appear anywhere in the file.
    local key="$1" value="$2" anchor="$3" conf="$4"
    grep -q "${key}" "${conf}" && return 0
    echo -n "    ${CYAN}${BOLD}${key}${NC}" >& $QUIET
    ${SIMU} sed -i -e "/${anchor}/a\    ${value}" "${conf}"
}
########## Main #################
# Argument parsing: -occ and -a consume the following argument; anything
# that is not a known option is taken as the ORGA name.
for ARG in "$@"; do
    if [ -n "${GETOCCCOMAND}" ]; then # right after -occ
        OCCCOMAND="${ARG}"
        GETOCCCOMAND=
    elif [ -n "${GETAPPS}" ]; then # right after -a
        LISTE_APPS="${LISTE_APPS} ${ARG}"
        GETAPPS=""
    else
        case "${ARG}" in
            '-h' | '--help' )
                usage && exit ;;
            '-n' | '--simu')
                SIMU="echo" ;;
            '-q' )
                QUIET="/dev/null" ;;
            '--nas' | '-nas' )
                ONNAS="SURNAS" ;;
            '-v' | '--version')
                COMMANDS="$(echo "${COMMANDS} VERSION" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-I' | '--install' )
                COMMANDS="$(echo "${COMMANDS} INIT" | sed "s/\s/\n/g" | sort | uniq)" ;; # sort|uniq keeps each command once
            '-U' | '--upgrade')
                COMMANDS="$(echo "${COMMANDS} UPGRADE" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-O' | '--officeURL')
                COMMANDS="$(echo "${COMMANDS} OFFICEURL" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '--optim' | '-optim' )
                COMMANDS="$(echo "${COMMANDS} OPTIMISE-CLOUD" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-u' )
                COMMANDS="$(echo "${COMMANDS} UPDATE-CLOUD-APP" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-i' )
                COMMANDS="$(echo "${COMMANDS} INSTALL-CLOUD-APP" | sed "s/\s/\n/g" | sort | uniq)" ;;
            '-a' )
                GETAPPS="now" ;;
            '--occ' | '-occ' )
                COMMANDS="$(echo "${COMMANDS} RUN-CLOUD-OCC" | sed "s/\s/\n/g" | sort | uniq)"
                GETOCCCOMAND="now" ;;
            -* ) # unknown option: ignore
                 # BUG FIX: was the quoted pattern '-*', which only matched the
                 # literal two-character string "-*"; any other unknown option
                 # fell through to *) and was silently taken for an ORGA name.
                ;;
            *)
                ORGA="${ARG%-orga}"
                DockerServName="${ORGA}-${nextcloudServName}"
                CLOUDCOMMUN=
                ;;
        esac
    fi
done
if [ -z "${COMMANDS}" ]; then
    usage && exit
fi
# Execute the requested commands in their canonical (sorted) order.
for COMMAND in ${COMMANDS}; do
    case "${COMMAND}" in
        'VERSION' )
            Version && exit ;;
        'OPTIMISE-CLOUD' )
            OptimiseClouds ;;
        'UPDATE-CLOUD-APP' )
            UpdateApplis ;;
        'UPGRADE' )
            UpgradeClouds ;;
        'INIT' )
            Init ;;
        'INSTALL-CLOUD-APP' )
            InstallApplis ;;
        'OFFICEURL' )
            setOfficeUrl ;;
        'RUN-CLOUD-OCC' )
            occCommand "${OCCCOMAND}";;
    esac
done

172
bin2/manageWiki.sh Executable file
View File

@@ -0,0 +1,172 @@
#!/bin/bash
# Management script for a DokuWiki instance:
# init / versions / restart ...
#
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
# GLOBAL VARS
PRG=$(basename $0)
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
AVAILABLE_ORGAS=${availableOrga[*]//-orga/}
DNLD_DIR="${KAZ_DNLD_DIR}/dokuwiki"
QUIET="1"                    # fd 1 (stdout) by default; -q redirects to /dev/null
ONNAS=                       # set by --nas
WIKICOMMUN="OUI_PAR_DEFAUT"  # non-empty = shared wiki; cleared when an ORGA is given
DockerServName=${dokuwikiServName}
declare -A Posts             # NOTE(review): declared but never used in this script
usage() {
    # Print the help text; variables are expanded exactly as before.
    cat <<EOF
${PRG} [OPTION] [COMMANDES] [ORGA]
Manipulation d'un dokuwiki
OPTIONS
 -h|--help Cette aide :-)
 -n|--simu SIMULATION
 -q|--quiet On ne parle pas (utile avec le -n pour avoir que les commandes)
 --nas L'orga se trouve sur le NAS !
COMMANDES (on peut en mettre plusieurs dans l'ordre souhaité)
 -I|--install L'initialisation du dokuwiki
 -v|--version Donne la version du dokuwiki et signale les MàJ
 --reload kill lighthttpd
ORGA parmi : ${AVAILABLE_ORGAS}
 ou vide si dokuwiki commun

EOF
}
Init(){
    # First-run initialisation of a DokuWiki instance (shared or per-orga):
    # posts the install form, patches local.php, installs template + plugins.
    NOM=$ORGA
    if [ -n "$WIKICOMMUN" ] ; then NOM="KAZ" ; fi
    # Volume paths are derived from VOL_PREFIX computed in the main section.
    TPL_DIR="${VOL_PREFIX}wikiLibtpl/_data"
    PLG_DIR="${VOL_PREFIX}wikiPlugins/_data"
    CONF_DIR="${VOL_PREFIX}wikiConf/_data"
    # Load WIKI_URL/WIKI_ROOT/WIKI_EMAIL/WIKI_PASSWORD from the secret env file.
    if [ -n "$WIKICOMMUN" ]; then
        . $KAZ_KEY_DIR/env-dokuwiki
    else
        . $KAZ_KEY_DIR/orgas/$ORGA/env-dokuwiki
    fi
    ${SIMU} checkDockerRunning "${DockerServName}" "${NOM}" || exit
    # local.php absent => the installer has never run for this instance.
    if [ ! -f "${CONF_DIR}/local.php" ] ; then
        echo "\n *** Premier lancement de Dokuwiki ${NOM}" >& $QUIET
        ${SIMU} waitUrl "${WIKI_URL}"
        # Drive install.php non-interactively (French locale, ACL on,
        # registration disabled, admin account from the secret file).
        ${SIMU} curl -X POST \
            -A "Mozilla/5.0 (X11; Linux x86_64)" \
            -d "l=fr" \
            -d "d[title]=${NOM}" \
            -d "d[acl]=true" \
            -d "d[superuser]=${WIKI_ROOT}" \
            -d "d[fullname]=Admin"\
            -d "d[email]=${WIKI_EMAIL}" \
            -d "d[password]=${WIKI_PASSWORD}" \
            -d "d[confirm]=${WIKI_PASSWORD}" \
            -d "d[policy]=1" \
            -d "d[allowreg]=false" \
            -d "d[license]=0" \
            -d "d[pop]=false" \
            -d "submit=Enregistrer" \
            "${WIKI_URL}/install.php"
        # XXX initialise admin:<pass>:admin:<mel>:admin,user
        #${SIMU} rsync -auHAX local.php users.auth.php acl.auth.php "${CONF_DIR}/"
        # Set the title, drop any template line, then force the docnavwiki
        # template. NOTE(review): the title expression appears twice — the
        # second one is a harmless duplicate.
        ${SIMU} sed -i "${CONF_DIR}/local.php" \
            -e "s|\(.*conf\['title'\].*=.*'\).*';|\1${NOM}';|g" \
            -e "s|\(.*conf\['title'\].*=.*'\).*';|\1${NOM}';|g" \
            -e "/conf\['template'\]/d" \
            -e '$a\'"\$conf['template'] = 'docnavwiki';"''
        # Default language: en -> fr.
        ${SIMU} sed -i -e "s|\(.*conf\['lang'\].*=.*'\)en';|\1fr';|g" "${CONF_DIR}/dokuwiki.php"
        ${SIMU} chown -R www-data: "${CONF_DIR}/"
    fi
    # Template and plugins are (re)installed on every call (idempotent unzip).
    ${SIMU} unzipInDir "${DNLD_DIR}/docnavwiki.zip" "${TPL_DIR}/"
    ${SIMU} chown -R www-data: "${TPL_DIR}/"
    # ckgedit : bof
    for plugin in captcha smtp todo wrap wrapadd; do
        ${SIMU} unzipInDir "${DNLD_DIR}/${plugin}.zip" "${PLG_DIR}"
    done
    ${SIMU} chown -R www-data: "${PLG_DIR}/"
}
Version(){
    # $1 ContainerName — print the DokuWiki version found inside the container.
    VERSION=$(docker exec $1 cat /dokuwiki/VERSION)
    printf 'Version %s : %s%s%s\n' "$1" "${GREEN}" "${VERSION}" "${NC}"
}
Reload(){
    # $1 ContainerName — re-run the farm init script then kill lighttpd so it
    # gets restarted with the fresh configuration.
    if [ -f "${VOL_PREFIX}wikiData/_data/farms/init.sh" ]; then
        ${SIMU} docker exec -ti "${1}" /dokuwiki/data/farms/init.sh
        # NOTE(review): pkill runs on the HOST, not inside the container —
        # confirm this is intended (it only works if lighttpd is visible from
        # the host PID namespace).
        ${SIMU} pkill -KILL lighttpd
    fi
}
########## Main #################
# Argument parsing: options and at most one ORGA (suffix "-orga" optional).
for ARG in "$@"; do
    case "${ARG}" in
        '-h' | '--help' )
            usage && exit ;;
        '-n' | '--simu')
            SIMU="echo" ;;
        '-q' )
            QUIET="/dev/null" ;;
        '--nas' | '-nas' )
            ONNAS="SURNAS" ;;
        '-v' | '--version')
            COMMANDS="$(echo "${COMMANDS} VERSION" | sed "s/\s/\n/g" | sort | uniq)" ;;
        '--reload' )
            COMMANDS="$(echo "${COMMANDS} RELOAD" | sed "s/\s/\n/g" | sort | uniq)" ;; # sed/sort/uniq: avoid duplicates
        '-I' | '--install' )
            COMMANDS="$(echo "${COMMANDS} INIT" | sed "s/\s/\n/g" | sort | uniq)" ;; # sed/sort/uniq: avoid duplicates
        -* ) # ignore unknown options
             # BUGFIX: unquoted pattern; a quoted '-*' only matched the
             # literal string "-*", so unknown options fell through to *)
             # and were treated as an ORGA name.
            ;;
        *)
            ORGA="${ARG%-orga}"
            DockerServName="${ORGA}-${dokuwikiServName}"
            WIKICOMMUN=
            ;;
    esac
done
if [ -z "${COMMANDS}" ]; then usage && exit ; fi
# Volume prefix and URL depend on shared-vs-orga and on NAS placement.
VOL_PREFIX="${DOCK_VOL}/orga_${ORGA}-"
WIKI_URL="${httpProto}://${ORGA}-${dokuwikiHost}.${domain}"
if [ -n "${WIKICOMMUN}" ]; then
    VOL_PREFIX="${DOCK_VOL}/dokuwiki_doku"
    WIKI_URL="${httpProto}://${dokuwikiHost}.${domain}"
elif [ -n "${ONNAS}" ]; then
    VOL_PREFIX="${NAS_VOL}/orga_${ORGA}-"
fi
for COMMAND in ${COMMANDS}; do
    case "${COMMAND}" in
        'VERSION' )
            Version "${DockerServName}" && exit ;;
        'INIT' )
            Init "${DockerServName}" ;;
        'RELOAD' )
            Reload "${DockerServName}";;
    esac
done

136
bin2/manageWp.sh Executable file
View File

@@ -0,0 +1,136 @@
#!/bin/bash
# Management script for a WordPress instance (shared or per-orga):
# init / versions / restart ...
#
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
#GLOBAL VARS
PRG=$(basename $0)
# All orgas declared in the config list; AVAILABLE_ORGAS drops the "-orga" suffix.
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
AVAILABLE_ORGAS=${availableOrga[*]//-orga/}
# QUIET is a redirection target: "1" (stdout) by default, /dev/null with -q.
QUIET="1"
ONNAS=
# Non-empty means "operate on the shared WordPress"; cleared when an ORGA is given.
WPCOMMUN="OUI_PAR_DEFAUT"
DockerServName=${wordpressServName}
declare -A Posts
usage() {
    # Print the help text; variables are expanded exactly as before.
    cat <<EOF
${PRG} [OPTION] [COMMANDES] [ORGA]
Manipulation d'un wordpress
OPTIONS
 -h|--help Cette aide :-)
 -n|--simu SIMULATION
 -q|--quiet On ne parle pas (utile avec le -n pour avoir que les commandes)
 --nas L'orga se trouve sur le NAS !
COMMANDES (on peut en mettre plusieurs dans l'ordre souhaité)
 -I|--install L'initialisation du wordpress
 -v|--version Donne la version du wordpress et signale les MàJ
ORGA parmi : ${AVAILABLE_ORGAS}
 ou vide si wordpress commun

EOF
}
Init(){
    # First-run initialisation of a WordPress instance: waits for the site,
    # posts the install form, then patches wp-config.php.
    PHP_CONF="${DOCK_VOL}/orga_${ORGA}-wordpress/_data/wp-config.php"
    WP_URL="${httpProto}://${ORGA}-${wordpressHost}.${domain}"
    if [ -n "${ONNAS}" ]; then
        PHP_CONF="${NAS_VOL}/orga_${ORGA}-wordpress/_data/wp-config.php"
    fi
    if ! [[ "$(docker ps -f name=${DockerServName} | grep -w ${DockerServName})" ]]; then
        printKazError "Wordpress not running... abort"
        exit
    fi
    # XXX find a test like: if ! grep -q "'installed' => true," "${PHP_CONF}" 2> /dev/null; then
    echo "\n *** Premier lancement de WP" >& $QUIET
    ${SIMU} waitUrl "${WP_URL}"
    # BUGFIX: this used to test $WIKICOMMUN (copy/paste from manageWiki.sh),
    # which is never set in this script, so the shared-instance credentials
    # were never loaded. WPCOMMUN is the flag actually maintained here.
    if [ -n "$WPCOMMUN" ]; then
        . $KAZ_KEY_DIR/env-wpServ
    else
        . $KAZ_KEY_DIR/orgas/$ORGA/env-wpServ
    fi
    # Drive the WP installer non-interactively (French, not indexed).
    ${SIMU} curl -X POST \
         -d "user_name=${WORDPRESS_ADMIN_USER}" \
         -d "admin_password=${WORDPRESS_ADMIN_PASSWORD}" \
         -d "admin_password2=${WORDPRESS_ADMIN_PASSWORD}" \
         -d "pw_weak=true" \
         -d "admin_email=admin@kaz.bzh" \
         -d "blog_public=0" \
         -d "language=fr_FR" \
         "${WP_URL}/wp-admin/install.php?step=2"
    #/* force updates without FTP credentials */
    _addVarBeforeInConf "FS_METHOD" "define('FS_METHOD', 'direct');" "\/\* That's all, stop editing! Happy publishing. \*\/" "$PHP_CONF"
}
Version(){
    # Print the WordPress version of the current container, extracted from
    # wp-includes/version.php ($wp_version = '...';).
    VERSION=$(docker exec $DockerServName cat /var/www/html/wp-includes/version.php \
        | grep "wp_version " \
        | sed -e "s/.*version\s*=\s*[\"\']//" -e "s/[\"\'].*//")
    printf 'Version %s : %s%s%s\n' "$DockerServName" "${GREEN}" "${VERSION}" "${NC}"
}
_addVarBeforeInConf(){
    # $1 key     : marker whose presence makes the call a no-op
    # $2 line    : line to insert
    # $3 where   : sed pattern of the anchor line; $2 is inserted just before it
    # $4 file    : PHP config file to edit in place
    grep -q "$1" "${4}" && return 0
    echo -n " ${CYAN}${BOLD}$1${NC}" >& $QUIET
    ${SIMU} sed -i -e "s/$3/$2\\n$3/" "${4}"
}
########## Main #################
# Argument parsing: options and at most one ORGA (suffix "-orga" optional).
for ARG in "$@"; do
    case "${ARG}" in
        '-h' | '--help' )
            usage && exit ;;
        '-n' | '--simu')
            SIMU="echo" ;;
        '-q' )
            QUIET="/dev/null" ;;
        '--nas' | '-nas' )
            ONNAS="SURNAS" ;;
        '-v' | '--version')
            COMMANDS="$(echo "${COMMANDS} VERSION" | sed "s/\s/\n/g" | sort | uniq)" ;;
        '-I' | '--install' )
            COMMANDS="$(echo "${COMMANDS} INIT" | sed "s/\s/\n/g" | sort | uniq)" ;; # sed/sort/uniq: avoid duplicates
        -* ) # ignore unknown options
             # BUGFIX: unquoted pattern; a quoted '-*' only matched the
             # literal string "-*", so unknown options fell through to *)
             # and were treated as an ORGA name.
            ;;
        *)
            ORGA="${ARG%-orga}"
            DockerServName="${ORGA}-${wordpressServName}"
            WPCOMMUN=
            ;;
    esac
done
if [ -z "${COMMANDS}" ]; then usage && exit ; fi
for COMMAND in ${COMMANDS}; do
    case "${COMMAND}" in
        'VERSION' )
            Version && exit ;;
        'INIT' )
            Init ;;
    esac
done

View File

@@ -0,0 +1,68 @@
#!/bin/bash
# One-shot migration: split the monolithic secret/SetAllPass.sh into
# per-service env files under KAZ_KEY_DIR, plus one env dir per orga.
# NOTE(review): appends blindly — re-running duplicates the lines; run once.
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
. $KAZ_ROOT/secret/SetAllPass.sh
# Mattermost admin credentials.
newenvfile=$KAZ_KEY_DIR/env-mattermostAdmin
touch $newenvfile
echo "mattermost_user=$mattermost_user" >> $newenvfile
echo "mattermost_pass=$mattermost_pass" >> $newenvfile
echo "mattermost_token=$mattermost_token" >> $newenvfile
# EMAIL_CONTACT moves into the shared dockers.env.
echo "EMAIL_CONTACT=$EMAIL_CONTACT" >> $DOCKERS_ENV
# Paheko API credentials.
newenvfile=$KAZ_KEY_DIR/env-paheko
touch $newenvfile
echo "API_USER=$paheko_API_USER" >> $newenvfile
echo "API_PASSWORD=$paheko_API_PASSWORD" >> $newenvfile
# Service mailbox credentials.
newenvfile=$KAZ_KEY_DIR/env-mail
touch $newenvfile
echo "service_mail=$service_mail" >> $newenvfile
echo "service_password=$service_password" >> $newenvfile
# Borg backup settings.
newenvfile=$KAZ_KEY_DIR/env-borg
# touch $newenvfile — presumably it already exists
echo "BORG_REPO=$BORG_REPO" >> $newenvfile
echo "BORG_PASSPHRASE=$BORG_PASSPHRASE" >> $newenvfile
echo "VOLUME_SAUVEGARDES=$VOLUME_SAUVEGARDES" >> $newenvfile
echo "MAIL_RAPPORT=$MAIL_RAPPORT" >> $newenvfile
echo "BORGMOUNT=$BORGMOUNT" >> $newenvfile
# Traefik dashboard credentials.
newenvfile=$KAZ_KEY_DIR/env-traefik
touch $newenvfile
echo "DASHBOARD_USER=$traefik_DASHBOARD_USER" >> $newenvfile
echo "DASHBOARD_PASSWORD=$traefik_DASHBOARD_PASSWORD" >> $newenvfile
#####################
# Castopod
# TO BE COPIED INTO A CONF FILE !! castopodAdmin
newenvfile=$KAZ_KEY_DIR/env-castopodAdmin
touch $newenvfile
echo "ADMIN_USER=$castopod_ADMIN_USER" >> $newenvfile
echo "ADMIN_MAIL=$castopod_ADMIN_MAIL" >> $newenvfile
echo "ADMIN_PASSWORD=$castopod_ADMIN_PASSWORD" >> $newenvfile
# Create the per-orga env directories, each seeded with a copy of every
# service env file (castopod/mattermost/nextcloud/spip/wp).
mkdir $KAZ_KEY_DIR/orgas
orgasLong=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
ORGAS=${orgasLong[*]//-orga/}
for orga in ${ORGAS};do
    mkdir $KAZ_KEY_DIR/orgas/$orga
    cp $KAZ_KEY_DIR/env-{castopod{Admin,DB,Serv},mattermost{DB,Serv},nextcloud{DB,Serv},spip{DB,Serv},wp{DB,Serv}} $KAZ_KEY_DIR/orgas/$orga
done
echo "C'est parfait, vous pouvez git pull puis supprimer SetAllPass.sh"

146
bin2/migVersProdX.sh Executable file
View File

@@ -0,0 +1,146 @@
#!/bin/bash
# what: migrate an orga (data + DNS) from PROD1 to another PRODx host
# when: 07/12/2023
# who:  françois then fab (a little)
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. $DOCKERS_ENV
. $KAZ_ROOT/secret/env-kaz
NAS_VOL="/mnt/disk-nas1/docker/volumes/"
# NOTE(review): if TAB_SITES_POSSIBLES is an array this keeps only its first
# element; use =("${TAB_SITES_POSSIBLES[@]}") if several sites are expected —
# verify against secret/env-kaz.
tab_sites_destinations_possibles=${TAB_SITES_POSSIBLES}
# default destination: the first site of the list
SITE_DST="${tab_sites_destinations_possibles[0]}"
declare -a availableOrga
availableOrga=($(getList "${KAZ_CONF_DIR}/container-orga.list"))
export SIMU=""
export COPY=""
usage () {
    # Print the help text, then abort the script.
    cat <<EOF
Usage: $0 [-n] [-d host_distant] [-c] [orga]...[orga]
 -h : this help
 -d host_distant : ${SITE_DST} par défaut
 -n : simulation
 -c : only copy data but doesn't stop
 [orgas] : in ${availableOrga[@]}
 example : migVersProdX.sh -d kazoulet -c splann-orga && migVersProdX.sh -d kazoulet splann-orga
EOF
    exit 1
}
# Option parsing: -n simulation, -c copy-only, -d destination host.
while getopts "hncd:" option; do
    case ${option} in
        h)
            usage
            exit 0
            ;;
        n)
            SIMU="echo"
            ;;
        c)
            COPY="true"
            ;;
        d)
            SITE_DST=${OPTARG}
            ;;
    esac
done
# is the remote site allowed?
if [[ " ${tab_sites_destinations_possibles[*]} " == *" $SITE_DST "* ]]; then
    true
else
    echo
    echo "${RED}${BOLD}Sites distants possibles : ${tab_sites_destinations_possibles[@]}${NC}"
    echo
    usage
    exit 0
fi
# Collect the orgas into an array (everything after the options).
shift $((OPTIND-1))
Orgas=("$@")
# Do these orgas exist on PROD1?
for orga in "${Orgas[@]}"; do
    if [[ ! " ${availableOrga[@]} " =~ " ${orga} " ]]; then
        echo
        # BUGFIX: the message interpolated a stray ${ARG} (undefined leftover
        # variable) before ${orga}; only the orga name is relevant here.
        echo "Unknown orga: ${RED}${BOLD}${orga}${NC}"
        echo
        usage
        exit 0
    fi
done
echo
echo "Site distant: ${GREEN}${BOLD}${SITE_DST}${NC}"
echo
#for orgaLong in ${Orgas}; do
# echo ${Orgas}
#done
#exit
# Main migration loop: for each orga, push its volumes, compose files and
# secrets to SITE_DST, then tear it down locally and start it remotely.
# BUGFIX: "for orgaLong in ${Orgas}" expands to only the FIRST element of the
# array, so every orga after the first was silently skipped; "${Orgas[@]}"
# iterates over all of them.
for orgaLong in "${Orgas[@]}"; do
    orgaCourt="${orgaLong%-orga}"
    orgaLong="${orgaCourt}-orga"
    echo "${BLUE}${BOLD}migration de ${orgaCourt}${NC}"
#    if [ -d "${DOCK_VOL_PAHEKO_ORGA}/${orgaCourt}" ]; then
#        if ! ssh -p 2201 root@${SITE_DST}.${domain} "test -d ${DOCK_VOL_PAHEKO_ORGA}/${orgaCourt}"; then
#            echo "${RED}${BOLD} ... can't move paheko to ${SITE_DST}${NC}"
#            echo " intall paheko in ${SITE_DST}.${domain} before!"
#            continue
#        fi
#    fi
    # Create the orga's paheko directory on SITE_DST if it doesn't exist yet
    # (useful when paheko is ordered but not yet installed on PROD1).
    if [ -f "${KAZ_COMP_DIR}/${orgaLong}/usePaheko" ]; then
        ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} "mkdir -p ${DOCK_VOL_PAHEKO_ORGA}/${orgaCourt} && chown www-data:www-data ${DOCK_VOL_PAHEKO_ORGA}/${orgaCourt}"
        # then regenerate the paheko routes for traefik
        ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} "cd ${KAZ_COMP_DIR}/paheko/ && ./docker-compose-gen.sh"
    fi
    # Unless copy-only (-c), show today's logs then stop the orga.
    if [ -z "${COPY}" ]; then
        cd "${KAZ_COMP_DIR}/${orgaLong}"
        docker-compose logs --tail 100| grep $(date "+ %Y-%m-%d")
        checkContinue
        ${SIMU} docker-compose down
    fi
    # Volumes may live on the NAS or on the local disk.
    if [ $(ls -d ${NAS_VOL}/orga_${orgaCourt}-* 2>/dev/null | wc -l) -gt 0 ]; then
        echo "${BLUE}${BOLD} ... depuis nas${NC}"
        ${SIMU} rsync -aAhHX --info=progress2 --delete ${NAS_VOL}/orga_${orgaCourt}-* -e "ssh -p 2201" root@${SITE_DST}.${domain}:${DOCK_VOL}
    else
        echo "${BLUE}${BOLD} ... depuis disque${NC}"
        ${SIMU} rsync -aAhHX --info=progress2 --delete ${DOCK_VOL}/orga_${orgaCourt}-* -e "ssh -p 2201" root@${SITE_DST}.${domain}:${DOCK_VOL}
    fi
    # Unless copy-only: move config + secrets, register the orga remotely,
    # remove it locally and start it on the destination.
    if [ -z "${COPY}" ]; then
        echo "${BLUE}${BOLD} ... config${NC}"
        if [ -d "${DOCK_VOL_PAHEKO_ORGA}/${orgaCourt}" ]; then
            ${SIMU} rsync -aAhHX --info=progress2 --delete "${DOCK_VOL_PAHEKO_ORGA}/${orgaCourt}" -e "ssh -p 2201" root@${SITE_DST}.${domain}:"${DOCK_VOL_PAHEKO_ORGA}/"
        fi
        ${SIMU} rsync -aAhHX --info=progress2 --delete ${KAZ_COMP_DIR}/${orgaLong} -e "ssh -p 2201" root@${SITE_DST}.${domain}:${KAZ_COMP_DIR}/
        ${SIMU} rsync -aAhHX --info=progress2 --delete ${KAZ_KEY_DIR}/orgas/${orgaCourt} -e "ssh -p 2201" root@${SITE_DST}.${domain}:${KAZ_KEY_DIR}/orgas/${orgaCourt}
        ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} "grep -q '^${orgaLong}\$' /kaz/config/container-orga.list || echo ${orgaLong} >> /kaz/config/container-orga.list"
        ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} ${KAZ_COMP_DIR}/${orgaLong}/init-volume.sh
        cd "${KAZ_COMP_DIR}/${orgaLong}"
        ${SIMU} ./orga-rm.sh
        ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} "${KAZ_COMP_DIR}/${orgaLong}/orga-gen.sh" --create
        ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} "${KAZ_BIN_DIR}/container.sh" start "${orgaLong}"
        ${SIMU} ssh -p 2201 root@${SITE_DST}.${domain} "${KAZ_BIN_DIR}/manageCloud.sh" --officeURL "${orgaCourt}"
    fi
done

43
bin2/migration.sh Executable file
View File

@@ -0,0 +1,43 @@
#!/bin/bash
# One-shot migration from the old /kaz-old tree to the new /kaz tree.
# V1 = old installation, V2 = new one.
CV1=/kaz-old/bin/container.sh
DV1=/kaz-old/dockers
EV1=/kaz-old/config
SV1=/kaz-old/secret
BV2=/kaz/bin
DV2=/kaz/dockers
EV2=/kaz/config
SV2=/kaz/secret
OV2=/kaz/config/orgaTmpl/orga-gen.sh
# Sanity: old container script and new bin dir must both exist.
[ -x "${CV1}" ] || exit
[ -d "${BV2}" ] || exit
# NOTE(review): SIMU is hard-wired — this script is a dry-run by default;
# clear SIMU to actually execute the commands.
SIMU="echo SIMU"
${SIMU} "${CV1}" stop orga
${SIMU} "${CV1}" stop
${SIMU} rsync "${EV1}/dockers.env" "${EV2}/"
${SIMU} rsync "${SV1}/" "${SV2}/"
# XXX ? rsync /kaz/secret/allow_admin_ip /kaz-git/secret/allow_admin_ip
${SIMU} "${BV2}/container.sh" start cloud dokuwiki ethercalc etherpad framadate paheko gitea jirafeau mattermost postfix proxy roundcube web
${SIMU} rsync -aAHXh --info=progress2 "${DV1}/web/html/" "/var/lib/docker/volumes/web_html/_data/"
${SIMU} chown -R www-data: "/var/lib/docker/volumes/web_html/_data/"
${SIMU} cd "${DV1}"
cd "${DV1}"
# Regenerate every orga that declares services via tmpl-gen.sh -l.
for ORGA_DIR in *-orga; do
    services=$(echo $([ -x "${ORGA_DIR}/tmpl-gen.sh" ] && "${ORGA_DIR}/tmpl-gen.sh" -l))
    if [ -n "${services}" ]; then
        ORGA="${ORGA_DIR%-orga}"
        echo " * ${ORGA}: ${services}"
        # Pass each service as "+name" to the new orga generator.
        ${SIMU} "${OV2}" "${ORGA}" $(for s in ${services}; do echo "+${s}"; done)
    fi
done

172
bin2/mvOrga2Nas.sh Executable file
View File

@@ -0,0 +1,172 @@
#!/bin/bash
# Move orga volumes from
#    /var/lib/docker/volumes/
# to
#    /mnt/disk-nas1/docker/volumes/
# (or back, with -r), leaving a symlink in place of the moved directory.
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
DOCK_NAS="/mnt/disk-nas1/docker/volumes"
DOCK_SRC="${DOCK_VOL}"
DOCK_DST="${DOCK_NAS}"
export PRG="$0"
cd $(dirname $0)
. "${DOCKERS_ENV}"
# Orga names (without "-orga"), comments and blanks stripped from the list.
declare -a availableOrga
availableOrga=($(sed -e "s/\(.*\)[ \t]*#.*$/\1/" -e "s/^[ \t]*\(.*\)-orga$/\1/" -e "/^$/d" "${KAZ_CONF_DIR}/container-orga.list"))
# no more export in .env
export $(set | grep "domain=")
export SIMU=""
export ONLY_SYNC=""
export NO_SYNC=""
export FORCE=""
export REVERSE=""
usage(){
    # Print the help text, then abort the script.
    cat <<EOF
Usage: ${PRG} [orga...]
 -h help
 -n simulation
 -y force
 -s phase1 only
 -r reverse (${DOCK_NAS} to ${DOCK_VOL})
 -ns no pre sync
EOF
    exit 1
}
# Option parsing: flags first, remaining words are orga names.
for ARG in $@; do
    case "${ARG}" in
        '-h' | '-help' )
            usage
            ;;
        '-n' )
            shift
            export SIMU="echo"
            ;;
        '-y' )
            shift
            export FORCE="yes"
            ;;
        '-s' )
            shift
            export ONLY_SYNC="yes"
            ;;
        '-r' )
            shift
            export REVERSE="yes"
            ;;
        '-ns' )
            shift
            export NO_SYNC="yes"
            ;;
        *)
            break
            ;;
    esac
done
# At least one orga is required, and each must be in the known list.
[[ -z "$1" ]] && usage
for ARG in $@; do
    [[ ! " ${availableOrga[*]} " =~ " ${ARG} " ]] && echo "${RED}${ARG}${NC} is not an orga" && usage
done
########################################
# copy into "new" so the final switch is atomic
NEW=""
if [ -n "${REVERSE}" ]; then
    DOCK_SRC="${DOCK_NAS}"
    DOCK_DST="${DOCK_VOL}"
    NEW="/new"
    ${SIMU} mkdir -p "${DOCK_DST}${NEW}"
fi
# keep a copy in "old" just in case
BACK="${DOCK_SRC}/old"
echo " move from ${BLUE}${BOLD}${DOCK_SRC}${NC} to ${BLUE}${BOLD}${DOCK_DST}${NC}"
checkContinue
cd "${DOCK_SRC}"
# every volume-name suffix used by orga volumes (orga_<name>-<EXT>)
volext=$(ls -d orga* | sed 's%.*-%%' | sort -u)
declare -a orgaPhase2
# To keep the service interruption as short as possible, pre-copy everything
# accumulated since the orga was created; phase 2 only syncs the delta.
echo -n "${BLUE}Phase 1: pre sync.${NC} "
[[ -z "${FORCE}" ]] && [[ -z "${NO_SYNC}" ]] && checkContinue
echo
for ARG in $@; do
    for EXT in ${volext}; do
        vol="orga_${ARG}-${EXT}"
        # does this volume exist for this orga?
        [ -e "${DOCK_SRC}/${vol}" ] || continue
        # a symlink towards /var/lib means it's already done
        [ -z "${REVERSE}" ] && [ -L "${DOCK_SRC}/${vol}" ] && echo "${GREEN}${vol}${NC} : done" && continue
        # a symlink towards the NAS in reverse mode is a problem
        [ -n "${REVERSE}" ] && [ -L "${DOCK_SRC}/${vol}" ] && echo "${GREEN}${vol}${NC} : bug" && continue
        # not a directory: something is wrong
        ! [ -d "${DOCK_SRC}/${vol}" ] && echo "${RED}${vol}${NC} : done ?" && continue
        # transfer already performed?
        if [ -n "${REVERSE}" ]; then
            ! [ -L "${DOCK_DST}/${vol}" ] && echo "${RED}${vol}${NC} : done" && continue
        fi
        echo " - ${YELLOW}${vol}${NC}"
        [[ -z "${NO_SYNC}" ]] && ${SIMU} rsync -auHAX --info=progress2 "${DOCK_SRC}/${vol}/" "${DOCK_DST}${NEW}/${vol}/"
        # remember this orga for phase 2 (deduplicated)
        [[ " ${orgaPhase2[@]} " =~ " ${ARG} " ]] || orgaPhase2+=( "${ARG}" )
    done
done
[ -n "${ONLY_SYNC}" ] && exit 0
if (( ${#orgaPhase2[@]} == 0 )); then
    exit 0
fi
# Phase 2: stop each orga, sync the delta, swap directory for symlink, restart.
echo -n "${BLUE}Phase 2: mv.${NC} "
[[ -z "${FORCE}" ]] && checkContinue
echo
mkdir -p "${BACK}"
for ARG in "${orgaPhase2[@]}"; do
    cd "${KAZ_ROOT}"
    cd "${KAZ_COMP_DIR}/${ARG}-orga"
    ! [ -e "docker-compose.yml" ] && echo "no docker-compose.yml for ${RED}${ARG}${NC}" && continue
    ${SIMU} docker-compose down
    # The downtime only lasts as long as copying the changes since phase 1.
    for EXT in ${volext}; do
        vol="orga_${ARG}-${EXT}"
        # does this volume exist for this orga?
        [ -e "${DOCK_SRC}/${vol}" ] || continue
        # a symlink towards /var/lib means it's already done
        [ -z "${REVERSE}" ] && [ -L "${DOCK_SRC}/${vol}" ] && echo "${GREEN}${vol}${NC} : done" && continue
        # a symlink towards the NAS in reverse mode is a problem
        [ -n "${REVERSE}" ] && [ -L "${DOCK_SRC}/${vol}" ] && echo "${GREEN}${vol}${NC} : bug" && continue
        # not a directory: something is wrong
        ! [ -d "${DOCK_SRC}/${vol}" ] && echo "${RED}${vol}${NC} : done ?" && continue
        # transfer already performed?
        if [ -n "${REVERSE}" ]; then
            ! [ -L "${DOCK_DST}/${vol}" ] && echo "${RED}${vol}${NC} : done" && continue
        fi
        echo " - ${YELLOW}${vol}${NC}"
        ${SIMU} rsync -auHAX --info=progress2 --delete "${DOCK_SRC}/${vol}/" "${DOCK_DST}${NEW}/${vol}/" || exit 1
        # keep the original aside in BACK, then switch over
        ${SIMU} mv "${DOCK_SRC}/${vol}" "${BACK}/"
        if [ -z "${REVERSE}" ]; then
            # /var/lib towards NAS: leave a symlink behind
            ${SIMU} ln -sf "${DOCK_DST}/${vol}" "${DOCK_SRC}/"
        else
            # NAS towards /var/lib: drop the old symlink, promote "new"
            ${SIMU} rm -f "${DOCK_SRC}/${vol}"
            ${SIMU} mv "${DOCK_DST}${NEW}/${vol}" "${DOCK_DST}/"
        fi
    done
    ${SIMU} docker-compose up -d
    [[ -x "reload.sh" ]] && "./reload.sh"
    echo
done

83
bin2/nettoie Executable file
View File

@@ -0,0 +1,83 @@
#!/bin/bash
# Purge temporary/backup files by moving them to a personal trash directory.
POUBELLE="${HOME}/tmp/POUBELLE"
mkdir -p "${POUBELLE}"
usage () {
    # Print the help text (does not exit; callers decide).
    cat <<EOF
${0##*/}  [-] [-h] [-help] [-clean] [-wipe] [-n] [directory ...]
 remove temporaries files
 - Treat the following arguments as filenames \`-\' so that
 you can specify filenames starting with a minus.
 -h
 -help Display this help.
 -n Simulate the remove (juste print files).
 directories are the roots where the purge had to be done. If no
 roots are given, the root is the home directory.
EOF
}
# DETRUIT: prefix for destructive commands ("echo" in simulation mode).
DETRUIT=""
ANT_OPT=""
ANT_CMD=""
# Only the first argument may be an option.
case "$1" in
    '-' )
        shift;;
    '-n' )
        DETRUIT="echo"
        ANT_OPT="-p"
        shift;;
    '-clean' )
        ANT_CMD="clean"
        shift;;
    '-wipe' )
        ANT_CMD="wipe"
        shift;;
    '-h' | '-help' )
        usage
        shift
        exit;;
esac
DIRS=$*
# With zero or one argument: if it is a directory (or empty), work from there.
if test "$#" -le 1
then
    DIRS="$*"
    if test -z "$1" -o -d "$1"
    then
        cd $1 || exit
        DIRS=.
    fi
fi
# -clean / -wipe mode: delegate to ant/make targets instead of removing files.
if test "${ANT_CMD}" != ""
then
    find $DIRS -type f -name build.xml -execdir ant -f {} "${ANT_CMD}" \;
    find $DIRS -type f -name Makefile\* -execdir make -f {} "${ANT_CMD}" \;
    exit
fi
# Old GIMP thumbnail dirs are removed outright.
find $DIRS -type d -name .xvpics -exec $DETRUIT rm -r {} \; -prune
# Everything matching the backup/junk patterns below is moved to POUBELLE;
# the POUBELLE directory itself is pruned to avoid recursing into it.
find $DIRS '(' \
    -type d -name POUBELLE -prune \
    -o \
    -type f '(' \
    -name core -o -name '*.BAK' -o -name '*.bak' -o -name '*.CKP' \
    -o -name '.*.BAK' -o -name '.*.bak' -o -name '.*.CKP' \
    -o -name '.*.back' -o -name '*.back' \
    -o -name '*.backup' -o -name '*.backup ' \
    -o -name '.*.backup' -o -name '.*.backup ' \
    -o -name .make.state \
    -o -name 'untitled*' -o -name 'Sansnom' \
    -o -name '.emacs_*' -o -name '.wi_*' \
    -o -name 'ws_ftp.log' -o -name 'hs_err*.log' \
    -o -name '#*' -o -name '*~' -o -name '.*~' -o -name junk \
    -o -name '.~lock.*#' \
    -o -name '*%' -o -name '.*%' \
    ')'\
    -print -exec $DETRUIT mv -f '{}' "${POUBELLE}" \; \
    ')'
# -o -name '*.ps' -o -name '.*.ps' \
# -o -name '*.i' -o -name '*.ixx' \
# -o -name '.*.sav' -o -name '*.sav' \

View File

@@ -0,0 +1,41 @@
#!/bin/bash
#date: 23/04/2025
#who: fab
#what: remove from acme.json the Let's Encrypt certificates that became useless
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
FILE_ACME_ORI="/var/lib/docker/volumes/traefik_letsencrypt/_data/acme.json"
# Work on a copy; the operator puts it back manually (see final message).
FILE_ACME="/tmp/acme.json"
FILE_URL=$(mktemp)
FILE_ACME_TMP=$(mktemp)
#the server ip:
#doesn't work for machines hosted at T.C... :( so we take the IP from config/dockers.env
#MAIN_IP=$(curl ifconfig.me)
#DANGER: IP from config/dockers.env doesn't work for domains outside *.kaz.bzh (ex:radiokalon.fr)
# NOTE(review): MAIN_IP is assumed to come from dockers.env sourced above — verify it is set.
#backup copy (timestamped)
cp $FILE_ACME_ORI $FILE_ACME
cp $FILE_ACME "$FILE_ACME"_$(date +%Y%m%d_%H%M%S)
#collect every certificate main domain
jq -r '.letsencrypt.Certificates[].domain.main' $FILE_ACME > $FILE_URL
while read -r url; do
    #echo "Traitement de : $url"
    # keep the certificate only if the domain still resolves to this server
    nb=$(dig $url | grep $MAIN_IP | wc -l)
    if [ "$nb" -eq 0 ]; then
        #not pointing here any more: drop it from acme.json
        echo "on supprime "$url
        jq --arg url "$url" 'del(.letsencrypt.Certificates[] | select(.domain.main == $url))' $FILE_ACME > $FILE_ACME_TMP
        mv -f $FILE_ACME_TMP $FILE_ACME
    fi
done < "$FILE_URL"
echo "si satisfait, remettre "$FILE_ACME" dans "$FILE_ACME_ORI

24
bin2/nextcloud_maintenance.sh Executable file
View File

@@ -0,0 +1,24 @@
#!/bin/bash
# Alert (via Mattermost) for every Nextcloud left in maintenance mode.
# load all variables and passwords
KAZ_ROOT=/kaz
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
# Mattermost REST API endpoint and team name.
URL_AGORA=https://$matterHost.$domain/api/v4
EQUIPE=kaz
PostMattermost() {
    # Post message $1 to channel $2 of the "${EQUIPE}" team via the Mattermost
    # REST API; the token comes from env-mattermostAdmin.
    . $KAZ_KEY_DIR/env-mattermostAdmin
    PostM=$1
    CHANNEL=$2
    # Resolve the team id, then the channel id within that team.
    TEAMID=$(curl -s -H "Authorization: Bearer ${mattermost_token}" "${URL_AGORA}/teams/name/${EQUIPE}" | jq .id | sed -e 's/"//g')
    CHANNELID=$(curl -s -H "Authorization: Bearer ${mattermost_token}" ${URL_AGORA}/teams/${TEAMID}/channels/name/${CHANNEL} | jq .id | sed -e 's/"//g')
    # BUGFIX: this was "curl -s i-X POST -i ..." — the stray "i" turned
    # "i-X" and "POST" into extra URL arguments instead of setting the
    # request method; "-i" (dump headers) was also pointless with output
    # discarded.
    curl -s -X POST -H "Authorization: Bearer ${mattermost_token}" -d "{\"channel_id\":\"${CHANNELID}\",\"message\":\"${PostM}\"}" "${URL_AGORA}/posts" >/dev/null 2>&1
}
# Orga names are derived from the *-cloudData docker volume directories.
LISTEORGA=$(ls -F1 /var/lib/docker/volumes/ | grep cloudData | sed -e 's/^orga_//g' -e 's/-cloudData\///g')
for CLOUD in ${LISTEORGA}
do
    # occ maintenance:mode prints "enabled"/"disabled"; alert when enabled.
    /kaz/bin/gestContainers.sh -cloud -occ "maintenance:mode" ${CLOUD} | grep -i enable && PostMattermost "ERREUR : Le cloud ${CLOUD} sur ${site} est en mode maintenance" "Sysadmin-alertes"
done

28
bin2/postfix-superviz.sh Executable file
View File

@@ -0,0 +1,28 @@
#!/bin/bash
# postfix supervision: alert on Mattermost when the mail queue grows too big
KAZ_ROOT=$(cd "$(dirname $0)"/..; pwd)
. $KAZ_ROOT/bin/.commonFunctions.sh
setKazVars
. $DOCKERS_ENV
URL_AGORA=$(echo $matterHost).$(echo $domain)
# Alert threshold (number of queued messages).
MAX_QUEUE=50
OLDIFS=$IFS
IFS=" "
# Last line of `mailq` looks like "-- NN Kbytes in NN Requests."; field 5 is
# the request count.
COUNT_MAILQ=$(docker exec -t mailServ mailq | tail -n1 | gawk '{print $5}')
# fetch passwords
. $KAZ_KEY_DIR/env-mattermostAdmin
docker exec ${mattermostServName} bin/mmctl --suppress-warnings auth login $httpProto://$URL_AGORA --name local-server --username $mattermost_user --password $mattermost_pass >/dev/null 2>&1
if [ "${COUNT_MAILQ}" -gt "${MAX_QUEUE}" ]; then
    echo "---------------------------------------------------------- "
    echo -e "Mail queue Postfix ALert, Messages: ${RED}${COUNT_MAILQ}${NC}"
    echo "---------------------------------------------------------- "
    docker exec mattermostServ bin/mmctl post create kaz:Sysadmin-alertes --message "Alerte mailq Postfix : La file d' attente est de ${COUNT_MAILQ} messages" >/dev/null 2>&1
fi
IFS=${OLDIFS}

17
bin2/runAlertings.sh Executable file
View File

@@ -0,0 +1,17 @@
#!/bin/bash
# Run every alerting probe shipped under dockers/*/alerting/ and post any
# non-empty probe output to the kaz:Sysadmin-alertes Mattermost channel.
KAZ_ROOT=$(cd "$(dirname $0)/.."; pwd)
. "${KAZ_ROOT}/bin/.commonFunctions.sh"
setKazVars
. "${DOCKERS_ENV}"
for dockerToTest in "$KAZ_ROOT"/dockers/*/alerting/; do
    # If the glob matches nothing it stays literal: skip non-directories.
    [ -d "$dockerToTest" ] || continue
    for aTest in "$dockerToTest"/*; do
        # Skip empty directories and non-executable leftovers (README, *.bak...).
        [ -x "$aTest" ] || continue
        res=$("$aTest")
        if [ -n "$res" ]; then
            echo "$res"
            # -t removed: there is no TTY when launched from cron; message is
            # truncated to Mattermost-friendly 1000 chars.
            docker exec -i mattermostServ bin/mmctl post create kaz:Sysadmin-alertes --message "${res:0:1000}"
        fi
    done
done

37
bin2/sauve_memory.sh Executable file
View File

@@ -0,0 +1,37 @@
#! /bin/sh
# date: 30/03/2022
# what: reclaim swap and RAM (only on the services known to leak memory)
# who: fab
# to check which processes use swap: for file in /proc/*/status ; do awk '/Tgid|VmSwap|Name/{printf $2 " " $3}END{ print ""}' $file; done | grep kB | sort -k 3 -n
# These commands give the process name, its PID and the swap size. For example:
# in /proc/<PID>/status there is a VMSwap entry: the swap used by the process.
#ethercalc: plain restart is enough
docker restart ethercalcDB ethercalcServ
#sympa
#docker restart sympaServ
#/kaz/dockers/sympa/reload.sh
# --> meh, it shuts things down badly (4 error mails around sympa_msg.pl / bounced.pl / task_manager.pl / bulk.pl / archived.pl)
#sympa
# restart sympa and re-run the script copying the message-filter libraries
#docker exec -it sympaServ service sympa stop
#sleep 5
#docker exec -it sympaServ service sympa start
#sleep 5
#/kaz/dockers/sympa/reload.sh
#sleep 2
#docker exec sympaServ chmod 777 /home/filter/filter.sh
#docker exec sympaServ sendmail -q
#to restart that d... collabora
#/kaz/bin/gestContainers.sh -office -m -r
#postfix: restart only the changedetector worker
# NOTE(review): -t requires a TTY — confirm this script is not run from cron.
docker exec -it mailServ supervisorctl restart changedetector
#proxy
#docker exec -i proxyServ bash -c "/etc/init.d/nginx reload"

Some files were not shown because too many files have changed in this diff Show More