Commit 1698a2ecb8 by François, 2022-02-10 01:37:43 +01:00 (parent eef34d8d9d)
10 changed files with 422 additions and 77 deletions

src/Jirafeau/a.php Normal file
View File

@@ -0,0 +1,308 @@
<?php
/*
 * Kaz addon (see https://git.kaz.bzh/KAZ/depollueur for information)
 * Create an archive for a set of files, or update a file's deadline:
 *   a.php?u=month&h=HHHHHHHH => extend the deadline
 *   a.php?g=l~k              => download a zip of the archive
 *   a.php?s=mel@domain.org   => send status e-mail
 */
define('JIRAFEAU_ROOT', dirname(__FILE__) . '/');
require(JIRAFEAU_ROOT . 'lib/settings.php');
require(JIRAFEAU_ROOT . 'lib/functions.php');
require(JIRAFEAU_ROOT . 'lib/lang.php');
/* Operations may take a long time.
* Be sure PHP's safe mode is off.
*/
@set_time_limit (0);
/* Remove errors. */
@error_reporting (0);
$do_update = false;
if (isset ($_REQUEST['u']) && !empty ($_REQUEST['u'])) {
$do_update = true;
}
$do_download = false;
if (isset ($_REQUEST['g']) && !empty ($_REQUEST['g'])) {
$do_download = true;
}
// ========================================
function return_error ($msg) {
require (JIRAFEAU_ROOT.'lib/template/header.php');
echo '<div class="error"><p>' . $msg . '</p></div>';
require (JIRAFEAU_ROOT.'lib/template/footer.php');
exit;
}
// ========================================
/** Update a link's expiration time.
 * @param $link_name the link's name (hash)
 * @param $link the link data array
 * @param $update_period the extension period in seconds (e.g. JIRAFEAU_MONTH)
 */
function jirafeau_update_link ($link_name, $link, $update_period) {
$time_max = $link ['time'];
$time_up = time () + $update_period;
$time_more = $time_up + JIRAFEAU_HOUR;
if ($time_max < 0 || $time_up < $time_max)
return $time_max;
$link ['time'] = $time_more;
$link_tmp_name = VAR_LINKS . $link['hash'] . rand (0, 10000) . '.tmp';
$handle = fopen ($link_tmp_name, 'w');
fwrite ($handle,
$link['file_name'] .NL. $link['mime_type'] .NL. $link['file_size'] .NL. $link['key'] .NL. $link['time'] .NL.
$link['hash'] .NL. $link['onetime'] .' '.JIRAFEAU_MONTH . ' '. JIRAFEAU_DAY .NL. $link['upload_date'] .NL.
$link['ip'] .NL. $link['link_code'] .NL. $link['crypted']);
fclose ($handle);
$link_file = VAR_LINKS . s2p("$link_name") . $link_name;
rename ($link_tmp_name, $link_file);
return $time_more;
}
// ========================================
function read_archive_info ($link) {
$p = s2p ($link ['hash']);
// read archive info
$result=[];
foreach (file (VAR_FILES . $p . $link['hash']) as $line) {
switch (true) {
case preg_match ("/^\s*src:\s*(([a-z0-9\+_\-]+)(\.[a-z0-9\+_\-]+)*@([a-z0-9\-]+\.)+[a-z]{2,6})\s*$/i", $line, $matches):
$result['sender'] = $matches[1];
break;
case preg_match ("/^\s*time:\s*(\d{4}([:-]\d{2}){5})\s*$/i", $line, $matches):
$result['timestamp'] = $matches[1];
break;
case preg_match ("/^\s*old:\s*([0-9a-zA-Z_-]+)\s+([0-9a-zA-Z_-]+)\s*$/", $line, $matches):
$result['old'][] = [$matches[1], $matches[2]];
break;
case preg_match ("/^\s*new:\s*([0-9a-zA-Z_-]+)\s+([0-9a-zA-Z_-]+)\s*$/", $line, $matches):
$result['new'][] = [$matches[1], $matches[2]];
break;
default:
break;
}
}
return $result;
}
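/* For reference, a hypothetical archive description file as produced by
 * filter.sh further down in this commit; the address, hashes and keys below
 * are placeholders, not real values:
 *   src: alice@example.org
 *   time: 2022-02-10-01:37:43
 *   old: q2AniL5zk8MmWa3q 0123456789abcdef0123456789abcdef
 *   new: fZ7wXTnmgkGu8Xab fedcba9876543210fedcba9876543210
 */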
// ========================================
if ($do_update) {
$update_period = JIRAFEAU_MONTH;
switch ($_REQUEST['u']) {
case 'minute':
$update_period = JIRAFEAU_MINUTE;
break;
case 'hour':
$update_period = JIRAFEAU_HOUR;
break;
case 'day':
$update_period = JIRAFEAU_DAY;
break;
case 'week':
$update_period = JIRAFEAU_WEEK;
break;
case 'month':
$update_period = JIRAFEAU_MONTH;
break;
case 'quarter':
$update_period = JIRAFEAU_QUARTER;
break;
case 'year':
$update_period = JIRAFEAU_YEAR;
break;
default:
return_error (t('ERR_OCC') . ' (update_period)');
}
$link_name = $_GET['h'];
if (!preg_match('/[0-9a-zA-Z_-]+$/', $link_name))
return_error (t('FILE_404'));
$link = jirafeau_get_link($link_name);
if (count($link) == 0)
return_error (t('FILE_404'));
$time = jirafeau_update_link($link_name, $link, $update_period);
$content = '' . $time . NL;
header('HTTP/1.0 200 OK');
header('Content-Length: ' . strlen ($content));
header('Content-Type: text/plain');
echo $content;
exit;
}
// ========================================
if ($do_download) {
// check that the archive exists
$couple = explode ("~", $_REQUEST['g'], 2);
if (count ($couple) == 0)
return_error ("bad archive name format : ".$_REQUEST['g']);
$link_name = $couple [0];
if (!$link_name || !preg_match ('/[0-9a-zA-Z_-]+$/', $link_name))
return_error ("bad archive name format : ".$_REQUEST['g']);
$crypt_key = count ($couple) == 2 ? $couple [1] : "";
$link = jirafeau_get_link ($link_name);
if (count ($link) == 0)
return_error (t ('FILE_404'));
$key = $link['key'];
if ($key && (empty ($crypt_key) || $key != $crypt_key))
return_error (t ('BAD_PSW'));
$archive_info = read_archive_info ($link);
// check entries
$archive_content = [];
$modif = false;
foreach (["old", "new"] as $cat) {
$single_name = [];
if (isset ($archive_info[$cat]))
foreach ($archive_info[$cat] as [$link_name, $crypt_key]) {
$link = jirafeau_get_link ($link_name);
if (count ($link) == 0) {
++$archive_content[$cat]['not_found'];
$modif = true;
continue;
}
$key = $link['key'];
if ($key && (empty ($crypt_key) || $key != $crypt_key)) {
++$archive_content[$cat]['bad_pass'];
$modif = true;
continue;
}
$src_name = $dst_name = $link['file_name'];
if (in_array ($src_name, $single_name))
for ($i = 0; $i < 10000; ++$i) {
$dst_name = sprintf ("%s-%2d", $src_name, $i);
if (!in_array ($dst_name, $single_name)) {
++$archive_content[$cat]['rename'];
$modif = true;
break;
}
}
$single_name [] = $dst_name;
$archive_content[$cat]['entries'][] = ['hash' => $link['hash'], 'file_name' => $dst_name, 'crypt_key' => $crypt_key, 'crypted' => $link['crypted']];
}
}
// build zip
$dirname = (isset ($archive_info['sender']) && !empty ($archive_info['sender']))
? $archive_info['sender'] : "kaz";
$dirname .= "-" . ((isset ($archive_info['timestamp']) && !empty ($archive_info['timestamp']))
? $archive_info['timestamp'] : date ("Ymd-His"));
$tmpFileName = tempnam (sys_get_temp_dir (), $dirname."-");
$zip = new ZipArchive;
if (!$zip)
return_error ("can't create zip");
if ($zip->open ($tmpFileName.".zip", ZipArchive::CREATE) !== TRUE)
return_error ("can't open zip");
// create info XXX
if ($modif) {
$info = '';
$not_found = $bad_pass = $rename = 0;
foreach (["old", "new"] as $cat) {
if (isset ($archive_info[$cat]['not_found']))
$not_found += $archive_info[$cat]['not_found'];
if (isset ($archive_info[$cat]['bad_pass']))
$bad_pass += $archive_info[$cat]['bad_pass'];
if (isset ($archive_info[$cat]['rename']))
$rename += $archive_info[$cat]['rename'];
}
if ($not_found)
$info .= $not_found. ($not_found ? " fichier est expiré." : " fichiers sont expirés.").NL;
if ($bad_pass)
$info .= "Mauvaise clef pour ". $bad_pass. ($bad_pass ? " fichier" : " fichiers.").NL;
if ($rename)
$info .= $rename. ($rename ? " fichier renommé." : " fichiers sont renommés.").NL;
$zip->addFromString ($dirname."-Avertissement.txt", $info);
}
foreach (["old", "new"] as $cat)
if (isset ($archive_info [$cat])) {
$subdir = $dirname . ($cat == "new" ? "" : "/origine");
foreach ($archive_content [$cat]['entries'] as $entry) {
$p = s2p ($entry ['hash']);
if ($entry['crypted']) {
$m = mcrypt_module_open ('rijndael-256', '', 'ofb', '');
$md5_key = md5 ($entry['crypt_key']);
$iv = jirafeau_crypt_create_iv ($md5_key, mcrypt_enc_get_iv_size ($m));
mcrypt_generic_init ($m, $md5_key, $iv);
$r = fopen (VAR_FILES . $p . $entry['hash'], 'r');
$content = "";
while (!feof ($r)) {
$dec = mdecrypt_generic ($m, fread ($r, 1024));
$content .= $dec;
ob_flush ();
}
fclose ($r);
$zip->addFromString ($subdir."/".$entry['file_name'], $content);
mcrypt_generic_deinit ($m);
mcrypt_module_close ($m);
continue;
}
$zip->addFile (VAR_FILES.$p.$entry['hash'], $subdir."/".$entry['file_name']);
}
}
$zip->close ();
if (!is_file ($tmpFileName.".zip"))
return_error ("can't retreive tmp");
if (false) {
// log
$content = print_r ($archive_info, 1);
$content .= print_r ($archive_content, 1);
header('HTTP/1.0 200 OK');
header('Content-Length: ' . strlen ($content));
header('Content-Type: text/plain');
echo $content;
exit;
}
header ("Content-Type: application/zip");
header ('Content-Disposition: attachment; filename="'.$dirname.'.zip"');
$r = fopen($tmpFileName.".zip", 'r');
while (!feof ($r)) {
print fread ($r, 1024);
ob_flush ();
}
fclose ($r);
unlink ($tmpFileName.".zip");
unlink ($tmpFileName);
exit;
}
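/* Sketch of the resulting zip layout (names are illustrative, built from the
 * "src:" and "time:" fields of the archive description file):
 *   alice@example.org-2022-02-10-01:37:43/                   new attachments of this mail
 *   alice@example.org-2022-02-10-01:37:43/origine/           attachments recovered from older links
 *   alice@example.org-2022-02-10-01:37:43-Avertissement.txt  only added when entries expired, were renamed or had a bad key
 */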
// ========================================
// XXX form send
$content = "TODO send form".NL;
if (isset ($_REQUEST['s']) && !empty ($_REQUEST['s'])) {
$sender=$_REQUEST ['s'];
if (!preg_match("/^([a-z0-9\+_\-]+)(\.[a-z0-9\+_\-]+)*@([a-z0-9\-]+\.)+[a-z]{2,6}$/ix", $sender))
return_error ("bad dir name format : ".$sender);
$content = "${sender}".NL;
}
// XXX find
// add link : name / time / download / delete
// sort by name ?
// $tmpFileName = tempnam (sys_get_temp_dir (), $dirname."-");
// $fd = fopen ($tmpFileName, "w");
// if (!$fd)
// return_error ("Unable to open tmp file!");
// fwrite ($fd, $content);
// fclose ($fd);
header('HTTP/1.0 200 OK');
header('Content-Length: ' . strlen ($content));
header('Content-Type: text/plain');
echo $content;
?>
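For orientation, a hypothetical round of calls against this endpoint. The hashes and keys are placeholders, and the depot.kaz.bzh host is only the default that filter.sh (below) builds from DOMAINNAME; this is a sketch, not tested output.

curl "https://depot.kaz.bzh/a.php?u=month&h=q2AniL5zk8MmWa3q"
    # extends the link by one month and prints the new expiration (Unix time)
curl -o bundle.zip "https://depot.kaz.bzh/a.php?g=aBcDeFgH12345678~0123456789abcdef0123456789abcdef"
    # streams a zip of the archive described by that link, old files under .../origine
curl "https://depot.kaz.bzh/a.php?s=alice@example.org"
    # status e-mail form, still a TODO in this version (it only echoes the address back)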

View File

@@ -65,9 +65,19 @@ if ($zip->open ($tmpFileName.".zip", ZipArchive::CREATE) !== TRUE)
 if ($notFoundCount) {
 $zip->addFromString ($dirname."-Avertissement.txt", $notFoundCount. ($notFoundCount ? " fichier est expiré." : " fichiers sont expirés."));
 }
+$single_name=[];
 foreach ($map as $link_name => $crypt_key) {
 $link = jirafeau_get_link ($link_name);
-$p = s2p ($link ['md5']);
+$p = s2p ($link ['hash']);
+$src_name = $dst_name = $link['file_name'];
+if (in_array ($src_name, $single_name))
+for ($i = 0; $i < 10000; ++$i) {
+$dst_name = sprintf ("%s-%2d", $src_name, $i);
+if (!in_array ($dst_name, $single_name))
+break;
+}
+$single_name[]=$dst_name;
 // send
 if ($link['crypted']) {
@@ -75,7 +85,7 @@ foreach ($map as $link_name => $crypt_key) {
 $md5_key = md5 ($crypt_key);
 $iv = jirafeau_crypt_create_iv ($md5_key, mcrypt_enc_get_iv_size ($m));
 mcrypt_generic_init ($m, $md5_key, $iv);
-$r = fopen (VAR_FILES . $p . $link['md5'], 'r');
+$r = fopen (VAR_FILES . $p . $link['hash'], 'r');
 $content = "";
 while (!feof ($r)) {
 $dec = mdecrypt_generic ($m, fread ($r, 1024));
@@ -83,13 +93,13 @@ foreach ($map as $link_name => $crypt_key) {
 ob_flush ();
 }
 fclose ($r);
-$zip->addFromString ($dirname."/".$link['file_name'], $content);
+$zip->addFromString ($dirname."/".$dst_name, $content);
 mcrypt_generic_deinit ($m);
 mcrypt_module_close ($m);
 continue;
 }
-$zip->addFile (VAR_FILES . $p . $link['md5'], $dirname."/".$link['file_name']);
+$zip->addFile (VAR_FILES . $p . $link['hash'], $dirname."/".$dst_name);
 }
 $zip->close ();

View File

@@ -42,8 +42,8 @@
 ##########################################################################
 cd $(dirname $0)
-# Exit coINSPECT_DIRdes from <sysexits.h>
+DOMAINNAME=$(cat domainname)
+# Exit codes from <sysexits.h>
 EX_TEMPFAIL=75
 EX_UNAVAILABLE=69
 EX_TOO_LARGE=552
@@ -55,8 +55,8 @@ MAX_KEEP_IN_MAIL=24ki
 MAX_UPLOAD_SIZE=100Mi
 SHRINK_CMD=/home/filter/eMailShrinker
 JIRAFEAU_CMD=/home/filter/jirafeauAPI
-JIRAFEAU_OLDURL="https://\(file\|depot\)\.kaz\.bzh"
-JIRAFEAU_URL=https://depot.kaz.bzh
+JIRAFEAU_URL=https://depot.${DOMAINNAME:-"kaz.bzh"}
+JIRAFEAU_LOCAL=http://depot
 JIRAFEAU_TIME=month
 MD5_CMD=/usr/bin/md5sum
 DISCLAMER_CMD=altermime
@@ -78,10 +78,12 @@ DATE_TEMPS=$(date "+%Y-%m-%d-%H:%M:%S")
 REP_PIECE_JOINTE=$(echo "${MAILS}/${DATE_TEMPS}_${MAIL_SOURCE}_$$")
 cd "${INSPECT_DIR}" || { echo "${INSPECT_DIR} does not exist"; exit "${EX_TEMPFAIL}"; }
+# lien renvoyé par le téléverssement
 ONE_LINK="${REP_PIECE_JOINTE}/one.txt"
-ALL_LINKS="${REP_PIECE_JOINTE}/url-list.txt"
+# anciens liens à réactiver
 OLD_LINKS="${REP_PIECE_JOINTE}/url-to-refresh.txt"
-PREV_CODES="${REP_PIECE_JOINTE}/prev-codes.txt"
+# contenu de l'archive
+ARCHIVE_CONTENT="${REP_PIECE_JOINTE}/archive-content.txt"
 # Clean up when done or when aborting.
 trap "rm -rf in.$$ in.$$.altered ${REP_PIECE_JOINTE}" 0 1 2 3 15
@@ -91,27 +93,28 @@ cat > "in.$$" || { LOG_FIC "Cannot save mail to file"; exit "${EX_TEMPFAIL}"; }
 # cp "${INSPECT_DIR}/in.$$" "${INSPECT_DIR}/in.$$.bak"
 mkdir -p "${REP_PIECE_JOINTE}/"
->"${ALL_LINKS}"
 >"${OLD_LINKS}"
->"${PREV_CODES}"
+>"${ARCHIVE_CONTENT}"
 # Etape de rafraichissement des anciens fichiers inclus
-OLD_CODES=""
+cat > "${ARCHIVE_CONTENT}" <<EOF
+src: ${MAIL_SOURCE}
+time: ${DATE_TEMPS}
+EOF
 LOG_FIC "${SHRINK_CMD} -u \"${INSPECT_DIR}/in.$$\" 2>> \"${FIC_LOG}\" > \"${OLD_LINKS}\""
 "${SHRINK_CMD}" -u "${INSPECT_DIR}/in.$$" 2>> "${FIC_LOG}" > "${OLD_LINKS}"
-cat "${OLD_LINKS}" | grep "${JIRAFEAU_OLDURL}" | while read REMOTE_LINK
+cat "${OLD_LINKS}" | grep "${JIRAFEAU_URL}" | while read REMOTE_LINK
 do
 REMOTE_REF=$(echo "${REMOTE_LINK}" | sed -e 's/.*h=\([^&]*\).*/\1/' -e 's/.*http.*//')
 [ -z "${REMOTE_REF}" ] && continue
 REMOTE_KEY=$(echo "${REMOTE_LINK}" | grep "k=" | sed 's%.*k=\([^&]*\).*%\1%')
 # update periode for download
-LOG_FIC " - \"${JIRAFEAU_CMD}\" -f \"${JIRAFEAU_URL}\" -t \"${JIRAFEAU_TIME}\" update \"${REMOTE_REF}\" 2>&1 >> \"${FIC_LOG}\""
-"${JIRAFEAU_CMD}" -f "${JIRAFEAU_URL}" -t "${JIRAFEAU_TIME}" update "${REMOTE_REF}" 2>&1 >> "${FIC_LOG}"
-echo -n "/${REMOTE_REF}~${REMOTE_KEY}" >> "${PREV_CODES}"
+LOG_FIC " - \"${JIRAFEAU_CMD}\" -f \"${JIRAFEAU_LOCAL}\" -t \"${JIRAFEAU_TIME}\" update \"${REMOTE_REF}\" 2>&1 >> \"${FIC_LOG}\""
+"${JIRAFEAU_CMD}" -f "${JIRAFEAU_LOCAL}" -t "${JIRAFEAU_TIME}" update "${REMOTE_REF}" 2>&1 >> "${FIC_LOG}"
+echo "old: ${REMOTE_REF} ${REMOTE_KEY}" >> "${ARCHIVE_CONTENT}"
 done
-OLD_CODES=$(cat "${PREV_CODES}")
-LOG_FIC " - OLD_CODES=${OLD_CODES}"
+LOG_FIC " - archive starts with: $(cat ${ARCHIVE_CONTENT})"
 # Etape extraction des pieces jointes
 LOG_FIC "${SHRINK_CMD} -s ${MAX_KEEP_IN_MAIL} -d ${REP_PIECE_JOINTE} ${INSPECT_DIR}/in.$$"
@@ -124,46 +127,67 @@ LOG_FIC "${SHRINK_CMD} -s ${MAX_KEEP_IN_MAIL} -d ${REP_PIECE_JOINTE} ${INSPECT_D
 ATTACH_NAME=$(grep "^Name: " "${ATTACH_TMP_NAME}/meta" | cut -c 7- )
 ATTACH_CONTENT_TYPE=$(grep "^Content-Type: " "${ATTACH_TMP_NAME}/meta" | cut -c 15- )
 else
-# XXX a virer
-ATTACH_MEDIA="${ATTACH_TMP_NAME}"
-ATTACH_NAME=$(basename "${ATTACH_MEDIA}")
-ATTACH_CONTENT_TYPE=""
+# XXX error
+continue
 fi
 # Etape de televersement des pieces jointes
 PASSWORD=$(apg -n 1 -m 12)
 PASSWORD_MD5=$(echo -n ${PASSWORD} | ${MD5_CMD} | cut -d \ -f 1)
-actualSize=$(ls -l "${ATTACH_MEDIA}")
-LOG_FIC " - \"${JIRAFEAU_CMD}\" -f \"${JIRAFEAU_URL}\" -s \"${MAX_UPLOAD_SIZE}\" -c \"${ATTACH_CONTENT_TYPE}\" -n \"${ATTACH_NAME}\" send \"${ATTACH_MEDIA}\" \"${PASSWORD}\" 2>> \"${FIC_LOG}\" > \"${ONE_LINK}\""
-"${JIRAFEAU_CMD}" -f "${JIRAFEAU_URL}" -s "${MAX_UPLOAD_SIZE}" -c "${ATTACH_CONTENT_TYPE}" -n "${ATTACH_NAME}" send "${ATTACH_MEDIA}" "${PASSWORD}" 2>> "${FIC_LOG}" > "${ONE_LINK}"
+LOG_FIC " - \"${JIRAFEAU_CMD}\" -f \"${JIRAFEAU_LOCAL}\" -s \"${MAX_UPLOAD_SIZE}\" -c \"${ATTACH_CONTENT_TYPE}\" -n \"${ATTACH_NAME}\" send \"${ATTACH_MEDIA}\" \"${PASSWORD}\" 2>> \"${FIC_LOG}\" > \"${ONE_LINK}\""
+"${JIRAFEAU_CMD}" -f "${JIRAFEAU_LOCAL}" -s "${MAX_UPLOAD_SIZE}" -c "${ATTACH_CONTENT_TYPE}" -n "${ATTACH_NAME}" send "${ATTACH_MEDIA}" "${PASSWORD}" 2>> "${FIC_LOG}" > "${ONE_LINK}"
 cat "${ONE_LINK}" | {
 read JIR_TOKEN
 read JIR_CODE
-LOG_FIC " - Jirafeau envoie ${JIR_TOKEN} et ${JIR_CODE}"
+LOG_FIC " - Jirafeau retourne ${JIR_TOKEN} et ${JIR_CODE}"
 case "${JIR_TOKEN}" in
 "" | no | *Error* | \<* )
 LOG_FIC " - impossible de televerser ${ATTACH_TMP_FILE} (${JIR_TOKEN}), il ne sera pas remplace dans le message"
-echo ""
+echo "new:"
 ;;
 * )
 LOG_FIC " - substitution par ${JIRAFEAU_URL}/f.php?d=1&h=${JIR_TOKEN}&k=${PASSWORD_MD5}"
-echo "${JIRAFEAU_URL}/f.php?d=1&h=${JIR_TOKEN}&k=${PASSWORD_MD5} /${JIR_TOKEN}~${PASSWORD_MD5}"
-echo "${JIRAFEAU_URL}/f.php?d=1&h=${JIR_TOKEN}&k=${PASSWORD_MD5}" >> "${ALL_LINKS}"
+echo "url: ${JIRAFEAU_URL}/f.php?d=1&h=${JIR_TOKEN}&k=${PASSWORD_MD5}"
+echo "new: ${JIR_TOKEN} ${PASSWORD_MD5}" >> "${ARCHIVE_CONTENT}"
 ;;
 esac
 }
 LOG_FIC " - supprimer l'extraction ${ATTACH_TMP_FILE}"
 rm -f "${ATTACH_TMP_FILE}"
 done
+# Création de l'archive
+if [ "$(wc -l < "${ARCHIVE_CONTENT}")" -ge 4 ]; then
+PASSWORD=$(apg -n 1 -m 12)
+PASSWORD_MD5=$(echo -n ${PASSWORD} | ${MD5_CMD} | cut -d \ -f 1)
+LOG_FIC " - \"${JIRAFEAU_CMD}\" -f \"${JIRAFEAU_LOCAL}\" -s \"${MAX_UPLOAD_SIZE}\" -c \"text/plain\" -n \"archive_content\" send \"${ARCHIVE_CONTENT}\" \"${PASSWORD}\" 2>> \"${FIC_LOG}\" > \"${ONE_LINK}\""
+"${JIRAFEAU_CMD}" -f "${JIRAFEAU_LOCAL}" -s "${MAX_UPLOAD_SIZE}" -c "text/plain" -n "archive_content" send "${ARCHIVE_CONTENT}" "${PASSWORD}" 2>> "${FIC_LOG}" > "${ONE_LINK}"
+cat "${ONE_LINK}" | {
+read JIR_TOKEN
+read JIR_CODE
+LOG_FIC " - Jirafeau retourne ${JIR_TOKEN} et ${JIR_CODE}"
+case "${JIR_TOKEN}" in
+"" | no | *Error* | \<* )
+LOG_FIC " - impossible de televerser l'archive (${JIR_TOKEN}), il ne sera pas remplace dans le message"
+echo "arch: bad"
+;;
+* )
+LOG_FIC " - ajoute de l'archive ${JIRAFEAU_URL}/a.php?g=${JIR_TOKEN}~${PASSWORD_MD5}"
+echo "arch: ${JIRAFEAU_URL}/a.php?g=${JIR_TOKEN}~${PASSWORD_MD5}"
+;;
+esac
+}
+else
+LOG_FIC " - pas d'archive (moins de 2 PJ)"
+echo "arch: none"
+fi
 # Etape de substitution
-LOG_FIC "${SHRINK_CMD} -a \"${JIRAFEAU_URL}/t.php?n=${MAIL_SOURCE}_${DATE_TEMPS}&l=${OLD_CODES}\" -s \"${MAX_KEEP_IN_MAIL}\" \"${INSPECT_DIR}/in.$$\" \"${INSPECT_DIR}/in.$$.altered\" 2>> \"${FIC_LOG}\""
-} | "${SHRINK_CMD}" -a "${JIRAFEAU_URL}/t.php?n=${MAIL_SOURCE}_${DATE_TEMPS}&l=${OLD_CODES}" -s "${MAX_KEEP_IN_MAIL}" "${INSPECT_DIR}/in.$$" "${INSPECT_DIR}/in.$$.altered" 2>> "${FIC_LOG}"
+LOG_FIC "${SHRINK_CMD} -s \"${MAX_KEEP_IN_MAIL}\" \"${INSPECT_DIR}/in.$$\" \"${INSPECT_DIR}/in.$$.altered\" 2>> \"${FIC_LOG}\""
+} | "${SHRINK_CMD}" -s "${MAX_KEEP_IN_MAIL}" "${INSPECT_DIR}/in.$$" "${INSPECT_DIR}/in.$$.altered" 2>> "${FIC_LOG}"
 # XXX trace
 # cp "${INSPECT_DIR}/in.$$" "${INSPECT_DIR}/in.$$.altered" /var/mail/tmp/
 # Etape choix de modification du message d'origine
-if [ -s "${ALL_LINKS}" -o -s "${OLD_LINKS}" ]
-then
+if [ "$(wc -l < "${ARCHIVE_CONTENT}")" -ge 3 ]; then
 # verification de taille finale
 actualSize=$(wc -c < "${INSPECT_DIR}/in.$$.altered")
 if [ ${actualSize} -ge $MAX_FINAL_SIZE ]; then

View File

@@ -0,0 +1,4 @@
filter unix - n n - 10 pipe
  flags=Rq user=filter null_sender=
  argv=/home/filter/filter.sh -f ${sender} -- ${recipient}
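This master.cf entry only defines a Postfix pipe transport named "filter"; routing mail through it is configured elsewhere. A minimal sketch of the usual companion setting (not part of this commit, an assumption about the deployment) would be, in main.cf:

content_filter = filter:dummy

With that in place Postfix pipes each queued message to filter.sh on stdin, and the script is responsible for re-injecting the (possibly altered) message.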

View File

@@ -89,7 +89,7 @@ kaz::operator << (ostream& os, const EmbeddedData& embeddedData) {
 os << embeddedData.imgIdx << ": "
 << embeddedData.contentType << " - " << embeddedData.name
 << " (" << embeddedData.startData << " / " << embeddedData.dataLength << ") "
-<< embeddedData.downloadUrl << " - " << embeddedData.downloadId
+<< embeddedData.downloadUrl
 << endl;
 return os;
 }

View File

@@ -124,18 +124,28 @@ MainAttachment::copy (ifstream &mbox, ofstream &outbox, const streamoff &begin,
 // ================================================================================
 void
-MainAttachment::fillUrlId (string &url, string &id) {
-DEF_LOG ("MainAttachment::fillUrlId", "");
-url = id = "";
-string urlId;
-getline (cin, urlId);
-LOG ("get URL: " << urlId);
-vector<string> urlIdVect { sregex_token_iterator (urlId.begin(), urlId.end (), whiteSpaceRegEx, -1), {} };
-if (urlIdVect [0].empty ())
+MainAttachment::readArchiveUrl () {
+DEF_LOG ("MainAttachment::readArchiveUrl", "");
+archiveDownloadURL.clear ();
+string line;
+getline (cin, line);
+LOG_BUG (line.rfind ("arch: ", 0) != 0, return, "eMailShrinker: bug ZZ: no archive link. (line: " << line << ")");
+LOG_BUG (line.rfind ("arch: bad", 0) == 0, return, "eMailShrinker: bug ZZ: bad archive link. (line: " << line << ")");
+if (line.rfind ("arch: none", 0) == 0)
 return;
-url = urlIdVect [0];
-if (urlIdVect.size () > 1)
-id = urlIdVect [1];
+archiveDownloadURL = line.substr (6);
+}
+// ================================================================================
+void
+MainAttachment::readDownloadUrl (string &url) {
+DEF_LOG ("MainAttachment::readDownloadUrl", "");
+url = "";
+string line;
+getline (cin, line);
+LOG ("get URL: " << line);
+LOG_BUG (line.rfind ("url: ", 0) != 0, return, "eMailShrinker: bug ZZ: no download link. (line: " << line << ")");
+url = line.substr (5);
 }
 // ================================================================================
@@ -148,11 +158,6 @@ MainAttachment::setExtractDir (const bfs::path &extractDir) {
 bfs::create_directory (extractDir);
 }
-void
-MainAttachment::setArchiveDownloadURL (const string &archiveDownloadURL) {
-this->archiveDownloadURL = archiveDownloadURL;
-}
 // ================================================================================
 void
 MainAttachment::addLink (string &plain, string &html, const string &url, const string &name) const {
@@ -175,14 +180,12 @@ MainAttachment::getDisclaim (string &plain, string &html) const {
 plain = html = "";
 int linkCount (0);
-string allId;
 string plainNewLinks, htmlNewLinks;
 for (Attachment *attachP : allMarkedPtrs) {
 if (!attachP->toExtract)
 continue;
 addLink (plainNewLinks, htmlNewLinks, attachP->downloadUrl, attachP->getAttachName ());
 ++linkCount;
-allId += attachP->downloadId;
 // if (previousLinks [attachP->downloadUrl] != previousLinks.end ())
 // // impossible puisque le lien est toujours nouveau
 // previousLinks.erase (attachP->downloadUrl);
@@ -193,10 +196,8 @@ MainAttachment::getDisclaim (string &plain, string &html) const {
 for (EmbeddedData &embedded : attachP->embeddedData) {
 addLink (plainNewLinks, htmlNewLinks, embedded.downloadUrl, embedded.name);
 ++linkCount;
-allId += embedded.downloadId;
 }
 }
-LOG ("allId:" << allId);
 string plainOldLinks, htmlOldLinks;
 for (map <string, string>::const_iterator it = previousLinks.begin (); it != previousLinks.end (); ++it)
@@ -215,11 +216,10 @@ MainAttachment::getDisclaim (string &plain, string &html) const {
 }
 if (linkCount > 1 && archiveDownloadURL.length ()) {
 string allPlainLinks (templatePlainAllLink);
-replaceAll (allPlainLinks, TMPL_DOWNLOAD, archiveDownloadURL+allId);
+replaceAll (allPlainLinks, TMPL_DOWNLOAD, archiveDownloadURL);
 plain += allPlainLinks;
 string allLinks (templateHtmlAllLink);
-// allId => & => &amp; done
-replaceAll (allLinks, TMPL_DOWNLOAD, archiveDownloadURL+allId);
+replaceAll (allLinks, TMPL_DOWNLOAD, archiveDownloadURL);
 html += allLinks;
 }
 html += templateHtmlFooter;
@@ -503,7 +503,7 @@ MainAttachment::substitute (ifstream &mbox, ofstream &outbox, const SizeArg &min
 map<const string, const string> translateHtml;
 for (Attachment *attachP : allMarkedPtrs)
 if (attachP->toExtract) {
-fillUrlId (attachP->downloadUrl, attachP->downloadId);
+readDownloadUrl (attachP->downloadUrl);
 if (attachP->downloadUrl.empty ()) {
 LOG ("no change");
 attachP->toExtract = false;
@@ -519,8 +519,9 @@ MainAttachment::substitute (ifstream &mbox, ofstream &outbox, const SizeArg &min
 if (!attachP->embeddedData.size ())
 continue;
 for (EmbeddedData &embedded : attachP->embeddedData)
-fillUrlId (embedded.downloadUrl, embedded.downloadId);
+readDownloadUrl (embedded.downloadUrl);
 }
+readArchiveUrl ();
 string plainDisclaim, htmlDisclaim;
 getDisclaim (plainDisclaim, htmlDisclaim);
 // copy email
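Pieced together from this diff and from filter.sh above, the stdin stream that the substitute pass now consumes looks roughly like this (one line per extracted attachment, then one archive line; hosts, hashes and keys are placeholders):

url: https://depot.kaz.bzh/f.php?d=1&h=fZ7wXTnmgkGu8Xab&k=0123456789abcdef0123456789abcdef
url: https://depot.kaz.bzh/f.php?d=1&h=q2AniL5zk8MmWa3q&k=fedcba9876543210fedcba9876543210
arch: https://depot.kaz.bzh/a.php?g=aBcDeFgH12345678~00112233445566778899aabbccddeeff00

readDownloadUrl() strips the "url: " prefix (a bare "new:" line, which filter.sh emits when an upload failed, leaves the URL empty and the attachment untouched), and readArchiveUrl() accepts "arch: <url>", "arch: none" or "arch: bad".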

View File

@@ -32,7 +32,7 @@
 // knowledge of the CeCILL-B license and that you accept its terms. //
 ////////////////////////////////////////////////////////////////////////////
-#define LAST_VERSION "eMailShrinker 1.4 2021-05-07"
+#define LAST_VERSION "2.0 2022-02-08 eMailShrinker"
 #include <iostream>
 #include <fstream>
@@ -67,7 +67,7 @@ usage (const string &msg = "", const bool &hidden = false) {
 << "Usage: " << endl
 << " A) " << prog << " -u mbox > url-list" << endl
 << " B) " << prog << " [-s size] [-d dirName}] mbox > file-list" << endl
-<< " C) " << prog << " [-s size] [-a url] mbox altered-mbox < url-list" << endl
+<< " C) " << prog << " [-s size] mbox altered-mbox < url-list" << endl
 << endl << " filter attachments" << endl << endl
 << " A: list previous embded url need to be refresh (no added option)" << endl
 << " => downloadURL list" << endl
@@ -114,7 +114,7 @@ main (int argc, char** argv) {
 updateListFlag (false),
 useTheForceLuke (false),
 listFlag (false);
-string inputName, outputName, archiveDownloadURL;
+string inputName, outputName;
 bfs::path extractDir (bfs::temp_directory_path ());
 SizeArg minAttachSize ("48 Ki");
@@ -125,7 +125,6 @@ main (int argc, char** argv) {
 ("size,s", value<SizeArg> (&minAttachSize)->default_value (minAttachSize), "minimum size for extration")
 ("updateList,u", bool_switch (&updateListFlag), "list URL need refresh")
 ("extractDir,d", value<bfs::path> (&extractDir)->default_value (extractDir), "set tmp directory name for extraction")
-("archiveDownloadURL,a", value<string> (&archiveDownloadURL)->default_value (archiveDownloadURL), "set url root web site to get bundle (like https://file.kaz.bzh/t.php?)")
 ;
 hide.add_options ()
@@ -203,7 +202,7 @@ main (int argc, char** argv) {
 cerr << attachment;
 if (updateListFlag) {
-// update
+// case update
 mbox.open (inputName);
 attachment.getUpdatedURL (mbox);
 showTime ("Find old links");
@@ -211,7 +210,7 @@ main (int argc, char** argv) {
 }
 if (outputName.empty ()) {
-// extract
+// case extract
 attachment.setExtractDir (extractDir);
 mbox.open (inputName);
 attachment.extract (mbox, minAttachSize);
@@ -219,9 +218,7 @@ main (int argc, char** argv) {
 return 0;
 }
-// substitute
-if (archiveDownloadURL.length ())
-attachment.setArchiveDownloadURL (archiveDownloadURL);
+// case substitute
 mbox.open (inputName);
 ofstream outbox (outputName);
 attachment.substitute (mbox, outbox, minAttachSize);
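For reference, hedged example invocations of the three modes named in the usage text above; file names are placeholders and the size syntax follows what filter.sh passes, this is a sketch rather than tested output:

# A: list previously embedded links that need refreshing
eMailShrinker -u in.mbox > url-list
# B: extract attachments larger than the threshold into a temporary directory
eMailShrinker -s 24ki -d /tmp/extract in.mbox > file-list
# C: rewrite the mail, reading "url: ..." lines and a final "arch: ..." line on stdin
eMailShrinker -s 24ki in.mbox altered.mbox < url-list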

View File

@@ -103,8 +103,8 @@ namespace kaz {
 bool toExtract, toUpdate, toDisclaim;
 /*! id of an image embedded in mbox */
 string cid;
-/*! url to replace the attachment and its short id */
-string downloadUrl, downloadId;
+/*! url to replace the attachment */
+string downloadUrl;
 /*! properties of embedded image (self encoded with base64)*/
 vector<EmbeddedData> embeddedData;

View File

@@ -50,7 +50,7 @@ namespace kaz {
 int imgIdx;
 /*! extracted in first pass */
 string contentType, name;
-string downloadUrl, downloadId;
+string downloadUrl;
 /*! area of base64 relative in the image section */
 string::size_type startData, dataLength;

View File

@@ -56,13 +56,14 @@ namespace kaz {
 /*! copy a slice of mbox to stdout */
 static void copy (ifstream &mbox, ofstream &outbox, const streamoff &begin, const streamoff &end);
-/*! get url and id (space separated) from stdin */
-void fillUrlId (string &url, string &id);
+/*! get url from stdin */
+void readDownloadUrl (string &url);
+/*! get archive url from stdin */
+void readArchiveUrl ();
 /*! location of extracted files */
 void setExtractDir (const bfs::path &extractDir);
-/*! URL base for archive download of all extracted files */
-void setArchiveDownloadURL (const string &archiveDownloadURL);
 /*! add a single link in disclaim */
 void addLink (string &plain, string &html, const string &url, const string &name) const;
 /*! get disclaim according alls links (retreived or create) */
@@ -80,7 +81,7 @@ namespace kaz {
 /*! dir path for extraction */
 bfs::path extractDir;
-/*! URL base for download archives */
+/*! URL for download archives */
 string archiveDownloadURL;
 /*! subset in the tree of all attachments to be consider for extraction or modification */