==> uwim <==

#! /bin/sh
#
# Midnight Commander - WIM support
#
# Written by:
#  Vadim Kalinnikov <moose@ylsoftware.com>
#
# This file is part of the Midnight Commander.
#
# It requires wimtools: https://wimlib.net/
# On Debian/Ubuntu wimtools can be installed via:
#   apt install wimtools

which wimlib-imagex 2>/dev/null > /dev/null || exit 1

WIM=`which wimlib-imagex`
AWK=awk

[ -n "$2" ] || exit 1

ACTION=$1
WIMFILENAME=$2

mcwim_list()
{
    # Here we could use "Image count" from the output,
    # but on some broken images we get garbage instead of a number.
    IMAGECOUNT=`${WIM} info ${WIMFILENAME} | grep Index: | grep -v Boot | wc -l`
    IMGNUM=1
    VUID=`id -nu`
    VGID=`id -ng`

    while [ ${IMGNUM} -le ${IMAGECOUNT} ]; do
        ${WIM} dir ${WIMFILENAME} ${IMGNUM} --detailed | \
        ${AWK} -v uid=${VUID} -v gid=${VGID} -v imgnum=${IMGNUM} '
        /^----------------------------------------------------------------------------/,/^$/ {
            if (match($0, /^Full Path/)) {
                split($0, namesrc, "\"");
                name=namesrc[2];
            }
            if (match($0, /FILE_ATTRIBUTE_DIRECTORY is set/)) {
                attr="drwxr-xr-x"
            }
            if (match($0, /^Uncompressed size/)) {
                size=$4;
            }
            if (match($0, /^Last Write Time/)) {
                months["Jan"] = "01"; months["Feb"] = "02"; months["Mar"] = "03";
                months["Apr"] = "04"; months["May"] = "05"; months["Jun"] = "06";
                months["Jul"] = "07"; months["Aug"] = "08"; months["Sep"] = "09";
                months["Oct"] = "10"; months["Nov"] = "11"; months["Dec"] = "12";
                split($0, mtimesrc, " ");
                mtime=sprintf("%s/%s/%s %s", months[mtimesrc[6]], mtimesrc[7], mtimesrc[9], mtimesrc[8]);
            }
            if (match($0, /^$/)) {
                printf("%s 1 %s %s % 20s % 24s IMAGE%s%s\n", attr, uid, gid, size, mtime, imgnum, name);
                name = size = mtime = "";
                attr="-rw-r--r--";
            }
        }
        '
        IMGNUM=$((IMGNUM+1))
    done

    # Virtual files
    echo "-r-xr-xr-x 1 ${VUID} ${VGID} 1 01/01/2020 00:00:00 OPTIMIZE"
    echo "-r-xr-xr-x 1 ${VUID} ${VGID} 1 01/01/2020 00:00:00 VERIFY"
}

mcwim_copyout()
{
    # Virtual files
    if [ "${FILENAMESRC}" = "OPTIMIZE" ]; then
        echo "#!/bin/sh" > ${FILENAMEDST}
        echo "# Run this to optimize archive" >> ${FILENAMEDST}
        echo "${WIM} optimize \"${WIMFILENAME}\"" >> ${FILENAMEDST}
        chmod a+x ${FILENAMEDST}
        exit 0;
    elif [ "${FILENAMESRC}" = "VERIFY" ]; then
        echo "#!/bin/sh" > ${FILENAMEDST}
        echo "# Run this to verify archive" >> ${FILENAMEDST}
        echo "${WIM} verify \"${WIMFILENAME}\"" >> ${FILENAMEDST}
        chmod a+x ${FILENAMEDST}
        exit 0;
    fi

    # Filename must contain imgnum
    echo ${FILENAMESRC} | grep -E '^IMAGE[0-9]+/' || exit 1

    IMGNUM=`echo ${FILENAMESRC} | cut -d '/' -f1 | sed "s/IMAGE//"`
    REALFILENAME=`echo ${FILENAMESRC} | sed "s/IMAGE${IMGNUM}//"`

    ${WIM} extract ${WIMFILENAME} ${IMGNUM} ${REALFILENAME} --to-stdout > ${FILENAMEDST}
}

mcwim_copyin()
{
    # Skip virtual files
    [ "${FILENAMEDST}" = "OPTIMIZE" ] && exit 1;
    [ "${FILENAMEDST}" = "VERIFY" ] && exit 1;

    # Filename must contain imgnum
    echo ${FILENAMEDST} | grep -E '^IMAGE[0-9]+/' || exit 1

    IMGNUM=`echo ${FILENAMEDST} | cut -d '/' -f1 | sed "s/IMAGE//"`
    REALFILENAME=`echo ${FILENAMEDST} | sed "s/IMAGE${IMGNUM}//"`

    echo "add \"${FILENAMESRC}\" \"${REALFILENAME}\"" | ${WIM} update ${WIMFILENAME} ${IMGNUM} > /dev/null
}

mcwim_rm()
{
    # Skip virtual files
    [ "${FILENAMESRC}" = "OPTIMIZE" ] && exit 0;
    [ "${FILENAMESRC}" = "VERIFY" ] && exit 0;

    # Filename must contain imgnum
    echo ${FILENAMESRC} | grep -E '^IMAGE[0-9]+/' || exit 1

    IMGNUM=`echo ${FILENAMESRC} | cut -d '/' -f1 | sed "s/IMAGE//"`
    REALFILENAME=`echo ${FILENAMESRC} | sed "s/IMAGE${IMGNUM}//"`

    if [ -z "${REALFILENAME}" ]; then
        # The user wants to remove a whole image
        ${WIM} delete ${WIMFILENAME} ${IMGNUM}
    else
        # Remove a regular file or directory
        echo "delete \"${REALFILENAME}\"" | ${WIM} update ${WIMFILENAME} ${IMGNUM} --force --recursive > /dev/null
    fi
}

mcwim_run()
{
    case ${RUNFILENAME} in
    OPTIMIZE)
        ${WIM} optimize ${WIMFILENAME}
        exit 0;
        ;;
    VERIFY)
        ${WIM} verify ${WIMFILENAME}
        exit 0;
        ;;
    esac
    exit 1;
}

mcwim_mkdir()
{
    # New dirname must contain imgnum
    echo ${NEWDIRNAME} | grep -E '^IMAGE[0-9]+/' || exit 1

    IMGNUM=`echo ${NEWDIRNAME} | cut -d '/' -f1 | sed "s/IMAGE//"`
    REALDIRNAME=`echo ${NEWDIRNAME} | sed "s/IMAGE${IMGNUM}//"`

    [ -z "${REALDIRNAME}" ] && exit 1

    TMPDIR=`mktemp -d`
    DSTBASENAME=`basename ${REALDIRNAME}`
    SRCDIRNAME="${TMPDIR}/${DSTBASENAME}"

    mkdir -p ${SRCDIRNAME}
    echo "add \"${SRCDIRNAME}\" \"${REALDIRNAME}\"" | ${WIM} update ${WIMFILENAME} ${IMGNUM} > /dev/null
    rm -rf ${TMPDIR}
}

#echo "'$1' '$2' '$3' '$4' '$5'" >> /tmp/mcdebug

case "$ACTION" in
list)
    mcwim_list
    ;;
copyout)
    [ -n "$4" ] || exit 1
    FILENAMESRC="$3"
    FILENAMEDST="$4"
    mcwim_copyout
    ;;
copyin)
    [ -n "$4" ] || exit 1
    FILENAMEDST="$3"
    FILENAMESRC="$4"
    mcwim_copyin
    ;;
rm|rmdir)
    [ -n "$3" ] || exit 1
    FILENAMESRC="$3"
    mcwim_rm
    ;;
run)
    [ -n "$3" ] || exit 1
    RUNFILENAME="$3"
    mcwim_run
    ;;
mkdir)
    [ -n "$3" ] || exit 1
    NEWDIRNAME="$3"
    mcwim_mkdir
    ;;
*)
    exit 1
    ;;
esac

==> rpm <==

#! /bin/sh
#
# VFS-wrapper for RPM (and src.rpm) files
#
# Copyright (C) 1996-2004,2009
# Free Software Foundation, Inc.
#
# Written by
#  Erik Troan <ewt@redhat.com> 1996
#  Jakub Jelinek <jj@sunsite.mff.cuni.cz> 1996, 2004
#  Tomasz Kłoczko <kloczek@rudy.mif.pg.gda.pl> 1997
#  Wojtek Pilorz <wpilorz@bdk.lublin.pl>
#       1997: minor changes
#  Michele Marziani <marziani@fe.infn.it>
#       1997: minor changes
#  Marc Merlin <marcsoft@merlins.org> 1998
#       1998: bug fixes
#  Michal Svec <rebel@penguin.cz> 2000
#       2000: locale bugfix
#  Andrew V. Samoilov <sav@bcs.zp.ua>
#       2004: whitespace(s) & single quote(s) in filename workaround
#       https://bugzilla.redhat.com/bugzilla/show_bug.cgi?id=64007
#  Slava Zanko <slavazanko@gmail.com>
#       2009: totally rewritten
#  Alexander Chumachenko <ledest@gmail.com>
#       2013: add dependency version output
#  Denis Silakov <denis.silakov@rosalab.ru>
#       2013: tar payload support
#  Arkadiusz Miśkiewicz <arekm@maven.pl>
#       2013: improve support for EPOCH,
#       add support for PREINPROG/POSTINPROG/PREUNPROG/POSTUNPROG,
#       add support for VERIFYSCRIPTPROG,
#       add support for TRIGGERSCRIPTS/TRIGGERSCRIPTPROG
#  Jiri Tyr <jiri.tyr@gmail.com>
#       2016: add support for PRETRANS/PRETRANSPROG/POSTTRANS/POSTTRANSPROG
#
# This file is part of the Midnight Commander.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# override any locale for dates unset LC_ALL LC_TIME=C export LC_TIME if rpmbuild --version >/dev/null 2>&1; then RPMBUILD="rpmbuild" else RPMBUILD="rpm" fi if rpm --nosignature --version >/dev/null 2>&1; then RPM="rpm --nosignature" RPMBUILD="$RPMBUILD --nosignature" else RPM="rpm" fi RPM_QUERY_FMT="$RPM -qp --qf" RPM2CPIO="rpm2cpio" SED="sed" param=$1; shift rpm_filename=$1; shift FILEPREF="-r--r--r-- 1 root root " mcrpmfs_getDesription() { $RPM -qip "${rpm_filename}" } mcrpmfs_getAllNeededTags() { $RPM_QUERY_FMT \ "|NAME=%{NAME}"\ "|VERSION=%{VERSION}"\ "|RELEASE=%{RELEASE}"\ "|DISTRIBUTION=%{DISTRIBUTION}"\ "|VENDOR=%{VENDOR}"\ "|DESCRIPTION=%{DESCRIPTION}"\ "|SUMMARY=%{SUMMARY}"\ "|URL=%{URL}"\ "|EPOCH=%{EPOCH}"\ "|LICENSE=%{LICENSE}"\ "|REQUIRES=%{REQUIRENAME} %{REQUIREFLAGS:depflags} %{REQUIREVERSION}"\ "|OBSOLETES=%{OBSOLETES}"\ "|PROVIDES=%{PROVIDES} %{PROVIDEFLAGS:depflags} %{PROVIDEVERSION}"\ "|CONFLICTS=%{CONFLICTS}"\ "|PACKAGER=%{PACKAGER}" \ "${rpm_filename}" \ | tr '\n' ' ' # The newlines in DESCRIPTION mess with the sed script in mcrpmfs_getOneTag(). } mcrpmfs_getRawOneTag() { $RPM_QUERY_FMT "$1" "${rpm_filename}" } mcrpmfs_getOneTag() { # 'echo' can't be used for arbitrary data (see commit message). printf "%s" "$AllTAGS" | $SED "s/.*|${1}=//" | cut -d '|' -f 1 } mcrpmfs_printOneMetaInfo() { if test "$3" = "raw"; then metaInfo=`mcrpmfs_getRawOneTag "%{$2}"` else metaInfo=`mcrpmfs_getOneTag "$2"` fi if test -n "${metaInfo}" -a "${metaInfo}" != "(none)"; then echo "${FILEPREF} 0 ${DATE} ${1}" return 0 fi return 1 } mcrpmfs_list_fastRPM () { echo "$FILEPREF 0 $DATE INFO/DISTRIBUTION" echo "$FILEPREF 0 $DATE INFO/VENDOR" echo "$FILEPREF 0 $DATE INFO/DESCRIPTION" echo "$FILEPREF 0 $DATE INFO/SUMMARY" echo "dr-xr-xr-x 1 root root 0 $DATE INFO/SCRIPTS" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/PRETRANS" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/POSTTRANS" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/PREIN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/POSTIN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/PREUN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/POSTUN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/VERIFYSCRIPT" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/TRIGGERSCRIPTS" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/ALL" echo "$FILEPREF 0 $DATE INFO/PACKAGER" echo "$FILEPREF 0 $DATE INFO/URL" echo "$FILEPREF 0 $DATE INFO/EPOCH" echo "$FILEPREF 0 $DATE INFO/LICENSE" echo "$FILEPREF 0 $DATE INFO/REQUIRES" echo "$FILEPREF 0 $DATE INFO/OBSOLETES" echo "$FILEPREF 0 $DATE INFO/PROVIDES" echo "$FILEPREF 0 $DATE INFO/ENHANCES" echo "$FILEPREF 0 $DATE INFO/SUGGESTS" echo "$FILEPREF 0 $DATE INFO/RECOMMENDS" echo "$FILEPREF 0 $DATE INFO/SUPPLEMENTS" echo "$FILEPREF 0 $DATE INFO/CONFLICTS" echo "$FILEPREF 0 $DATE INFO/CHANGELOG" } mcrpmfs_list_fullRPM () { mcrpmfs_printOneMetaInfo "INFO/DISTRIBUTION" "DISTRIBUTION" mcrpmfs_printOneMetaInfo "INFO/VENDOR" "VENDOR" mcrpmfs_printOneMetaInfo "INFO/DESCRIPTION" "DESCRIPTION" mcrpmfs_printOneMetaInfo "INFO/SUMMARY" "SUMMARY" if test "`mcrpmfs_getRawOneTag \"%{RPMTAG_PRETRANS}%{RPMTAG_POSTTRANS}%{RPMTAG_PREIN}%{RPMTAG_POSTIN}%{RPMTAG_PREUN}%{RPMTAG_POSTUN}%{VERIFYSCRIPT}%{TRIGGERSCRIPTS}\"`" != "(none)(none)(none)(none)(none)(none)(none)(none)"; then mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/PRETRANS" "RPMTAG_PRETRANS" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/POSTTRANS" "RPMTAG_POSTTRANS" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/PREIN" "RPMTAG_PREIN" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/POSTIN" "RPMTAG_POSTIN" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/PREUN" "RPMTAG_PREUN" "raw" 
mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/POSTUN" "RPMTAG_POSTUN" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/VERIFYSCRIPT" "VERIFYSCRIPT" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/TRIGGERSCRIPTS" "TRIGGERSCRIPTS" "raw" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/ALL" fi if test "`mcrpmfs_getRawOneTag \"%{RPMTAG_PRETRANSPROG}%{RPMTAG_POSTTRANSPROG}%{RPMTAG_PREINPROG}%{RPMTAG_POSTINPROG}%{RPMTAG_PREUNPROG}%{RPMTAG_POSTUNPROG}%{VERIFYSCRIPTPROG}%{TRIGGERSCRIPTPROG}\"`" != "(none)(none)(none)(none)(none)(none)(none)(none)"; then mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/PRETRANSPROG" "RPMTAG_PRETRANSPROG" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/POSTTRANSPROG" "RPMTAG_POSTTRANSPROG" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/PREINPROG" "RPMTAG_PREINPROG" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/POSTINPROG" "RPMTAG_POSTINPROG" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/PREUNPROG" "RPMTAG_PREUNPROG" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/POSTUNPROG" "RPMTAG_POSTUNPROG" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/VERIFYSCRIPTPROG" "VERIFYSCRIPTPROG" "raw" mcrpmfs_printOneMetaInfo "INFO/SCRIPTS/TRIGGERSCRIPTPROG" "TRIGGERSCRIPTPROG" "raw" fi mcrpmfs_printOneMetaInfo "INFO/PACKAGER" "PACKAGER" mcrpmfs_printOneMetaInfo "INFO/URL" "URL" mcrpmfs_printOneMetaInfo "INFO/EPOCH" "EPOCH" mcrpmfs_printOneMetaInfo "INFO/LICENSE" "LICENSE" mcrpmfs_printOneMetaInfo "INFO/REQUIRES" "REQUIRES" mcrpmfs_printOneMetaInfo "INFO/OBSOLETES" "OBSOLETES" mcrpmfs_printOneMetaInfo "INFO/PROVIDES" "PROVIDES" mcrpmfs_printOneMetaInfo "INFO/CONFLICTS" "CONFLICTS" mcrpmfs_printOneMetaInfo "INFO/CHANGELOG" "CHANGELOGTEXT" "raw" } mcrpmfs_list () { # set MCFASTRPM_DFLT to 1 for faster rpm files handling by default, to 0 for # slower handling MCFASTRPM_DFLT=0 if test -z "$MCFASTRPM"; then MCFASTRPM=$MCFASTRPM_DFLT fi DESC=`mcrpmfs_getDesription 2>/dev/null` || { echo "$FILEPREF 0 "`date +"%b %d %H:%M"`" ERROR" exit 1 } DATE=`mcrpmfs_getRawOneTag "%{BUILDTIME:date}\n" | cut -c 5-11,21-24` PAYLOAD=`mcrpmfs_getRawOneTag "%{PAYLOADFORMAT}\n" | sed s/ustar/tar/` HEADERSIZE=`printf '%s\n' "$DESC" | wc -c` # 'echo' can't be used for arbitrary data (see commit message). 
printf '%s %s %s HEADER\n' "${FILEPREF}" "${HEADERSIZE}" "${DATE}" echo "-r-xr-xr-x 1 root root 0 $DATE INSTALL" case "${rpm_filename}" in *.src.rpm) echo "-r-xr-xr-x 1 root root 0 $DATE REBUILD" ;; *) echo "-r-xr-xr-x 1 root root 0 $DATE UPGRADE" ;; esac echo "dr-xr-xr-x 3 root root 0 $DATE INFO" if [ `mcrpmfs_getRawOneTag "%{EPOCH}"` = "(none)" ]; then echo "$FILEPREF 0 $DATE INFO/NAME-VERSION-RELEASE" else echo "$FILEPREF 0 $DATE INFO/NAME-EPOCH:VERSION-RELEASE" fi echo "$FILEPREF 0 $DATE INFO/GROUP" echo "$FILEPREF 0 $DATE INFO/BUILDHOST" echo "$FILEPREF 0 $DATE INFO/SOURCERPM" echo "$FILEPREF 0 $DATE INFO/BUILDTIME" echo "$FILEPREF 0 $DATE INFO/RPMVERSION" echo "$FILEPREF 0 $DATE INFO/OS" echo "$FILEPREF 0 $DATE INFO/SIZE" if test "$MCFASTRPM" = 0 ; then mcrpmfs_list_fullRPM else mcrpmfs_list_fastRPM fi echo "$FILEPREF 0 $DATE CONTENTS.$PAYLOAD" } mcrpmfs_copyout () { case "$1" in HEADER) mcrpmfs_getDesription > "$2"; exit 0;; INSTALL) echo "# Run this to install this RPM package" > "$2" exit 0 ;; UPGRADE) echo "# Run this to upgrade this RPM package" > "$2" exit 0 ;; REBUILD) echo "# Run this to rebuild this RPM package" > "$2" exit 0 ;; ERROR) mcrpmfs_getDesription > /dev/null 2> "$2"; exit 0;; INFO/NAME-VERSION-RELEASE) echo `mcrpmfs_getOneTag "NAME"`-`mcrpmfs_getOneTag "VERSION"`-`mcrpmfs_getOneTag "RELEASE"` > "$2" exit 0 ;; INFO/NAME-EPOCH:VERSION-RELEASE) echo `mcrpmfs_getOneTag "NAME"`-`mcrpmfs_getOneTag "EPOCH"`:`mcrpmfs_getOneTag "VERSION"`-`mcrpmfs_getOneTag "RELEASE"` > "$2" exit 0 ;; INFO/RELEASE) mcrpmfs_getOneTag "RELEASE" > "$2"; exit 0;; INFO/GROUP) mcrpmfs_getRawOneTag "%{GROUP}\n" > "$2"; exit 0;; INFO/DISTRIBUTION) mcrpmfs_getOneTag "DISTRIBUTION" > "$2"; exit 0;; INFO/VENDOR) mcrpmfs_getOneTag "VENDOR" > "$2"; exit 0;; INFO/BUILDHOST) mcrpmfs_getRawOneTag "%{BUILDHOST}\n" > "$2"; exit 0;; INFO/SOURCERPM) mcrpmfs_getRawOneTag "%{SOURCERPM}\n" > "$2"; exit 0;; INFO/DESCRIPTION) mcrpmfs_getRawOneTag "%{DESCRIPTION}\n" > "$2"; exit 0;; INFO/PACKAGER) mcrpmfs_getOneTag "PACKAGER" > "$2"; exit 0;; INFO/URL) mcrpmfs_getOneTag "URL" >"$2"; exit 0;; INFO/BUILDTIME) mcrpmfs_getRawOneTag "%{BUILDTIME:date}\n" >"$2"; exit 0;; INFO/EPOCH) mcrpmfs_getOneTag "EPOCH" >"$2"; exit 0;; INFO/LICENSE) mcrpmfs_getOneTag "LICENSE" >"$2"; exit 0;; INFO/RPMVERSION) mcrpmfs_getRawOneTag "%{RPMVERSION}\n" >"$2"; exit 0;; INFO/REQUIRES) mcrpmfs_getRawOneTag "[%{REQUIRENAME} %{REQUIREFLAGS:depflags} %{REQUIREVERSION}\n]" >"$2"; exit 0;; INFO/ENHANCES) mcrpmfs_getRawOneTag "[%|ENHANCESFLAGS:depflag_strong?{}:{%{ENHANCESNAME} %{ENHANCESFLAGS:depflags} %{ENHANCESVERSION}\n}|]" "$f" >"$3"; exit 0;; INFO/SUGGESTS) mcrpmfs_getRawOneTag "[%|SUGGESTSFLAGS:depflag_strong?{}:{%{SUGGESTSNAME} %{SUGGESTSFLAGS:depflags} %{SUGGESTSVERSION}\n}|]" "$f" >"$3"; exit 0;; INFO/RECOMMENDS) mcrpmfs_getRawOneTag "[%|SUGGESTSFLAGS:depflag_strong?{%{SUGGESTSNAME} %{SUGGESTSFLAGS:depflags} %{SUGGESTSVERSION}\n}|]" "$f" >"$3"; exit 0;; INFO/SUPPLEMENTS) mcrpmfs_getRawOneTag "[%|ENHANCESFLAGS:depflag_strong?{%{ENHANCESNAME} %{ENHANCESFLAGS:depflags} %{ENHANCESVERSION}\n}|]" "$f" >"$3"; exit 0;; INFO/PROVIDES) mcrpmfs_getRawOneTag "[%{PROVIDES} %{PROVIDEFLAGS:depflags} %{PROVIDEVERSION}\n]" >"$2"; exit 0;; INFO/SCRIPTS/PRETRANS) mcrpmfs_getRawOneTag "%{RPMTAG_PRETRANS}\n" >"$2"; exit 0;; INFO/SCRIPTS/PRETRANSPROG) mcrpmfs_getRawOneTag "%{RPMTAG_PRETRANSPROG}\n" >"$2"; exit 0;; INFO/SCRIPTS/POSTTRANS) mcrpmfs_getRawOneTag "%{RPMTAG_POSTTRANS}\n" >"$2"; exit 0;; INFO/SCRIPTS/POSTTRANSPROG) mcrpmfs_getRawOneTag 
"%{RPMTAG_POSTTRANSPROG}\n" >"$2"; exit 0;; INFO/SCRIPTS/PREIN) mcrpmfs_getRawOneTag "%{RPMTAG_PREIN}\n" >"$2"; exit 0;; INFO/SCRIPTS/PREINPROG) mcrpmfs_getRawOneTag "%{RPMTAG_PREINPROG}\n" >"$2"; exit 0;; INFO/SCRIPTS/POSTIN) mcrpmfs_getRawOneTag "%{RPMTAG_POSTIN}\n" >"$2"; exit 0;; INFO/SCRIPTS/POSTINPROG) mcrpmfs_getRawOneTag "%{RPMTAG_POSTINPROG}\n" >"$2"; exit 0;; INFO/SCRIPTS/PREUN) mcrpmfs_getRawOneTag "%{RPMTAG_PREUN}\n" >"$2"; exit 0;; INFO/SCRIPTS/PREUNPROG) mcrpmfs_getRawOneTag "%{RPMTAG_PREUNPROG}\n" >"$2"; exit 0;; INFO/SCRIPTS/POSTUN) mcrpmfs_getRawOneTag "%{RPMTAG_POSTUN}\n" >"$2"; exit 0;; INFO/SCRIPTS/POSTUNPROG) mcrpmfs_getRawOneTag "%{RPMTAG_POSTUNPROG}\n" >"$2"; exit 0;; INFO/SCRIPTS/VERIFYSCRIPT) mcrpmfs_getRawOneTag "%{VERIFYSCRIPT}\n" > "$2"; exit 0;; INFO/SCRIPTS/VERIFYSCRIPTPROG) mcrpmfs_getRawOneTag "%{VERIFYSCRIPTPROG}\n" > "$2"; exit 0;; INFO/SCRIPTS/TRIGGERSCRIPTS) $RPM -qp --triggers "${rpm_filename}" > "$2"; exit 0;; INFO/SCRIPTS/TRIGGERSCRIPTPROG) mcrpmfs_getRawOneTag "%{TRIGGERSCRIPTPROG}\n" > "$2"; exit 0;; INFO/SCRIPTS/ALL) $RPM -qp --scripts "${rpm_filename}" > "$2"; exit 0;; INFO/SUMMARY) mcrpmfs_getRawOneTag "%{SUMMARY}\n" > "$2"; exit 0;; INFO/OS) mcrpmfs_getRawOneTag "%{OS}\n" > "$2"; exit 0;; INFO/CHANGELOG) mcrpmfs_getRawOneTag "[* %{CHANGELOGTIME:date} %{CHANGELOGNAME}\n%{CHANGELOGTEXT}\n\n]\n" > "$2"; exit 0;; INFO/SIZE) mcrpmfs_getRawOneTag "%{SIZE} bytes\n" > "$2"; exit 0;; INFO/OBSOLETES) mcrpmfs_getRawOneTag "[%{OBSOLETENAME} %|OBSOLETEFLAGS?{%{OBSOLETEFLAGS:depflags} %{OBSOLETEVERSION}}:{}|\n]" > "$2"; exit 0;; INFO/CONFLICTS) mcrpmfs_getRawOneTag "[%{CONFLICTNAME} %{CONFLICTFLAGS:depflags} %{CONFLICTVERSION}\n]" >"$2"; exit 0;; CONTENTS.*) $RPM2CPIO "${rpm_filename}" > "$2"; exit 0;; *) ;; esac } mcrpmfs_run () { case "$1" in INSTALL) echo "Installing \"${rpm_filename}\""; $RPM -ivh "${rpm_filename}"; exit 0;; UPGRADE) echo "Upgrading \"${rpm_filename}\""; $RPM -Uvh "${rpm_filename}"; exit 0;; REBUILD) echo "Rebuilding \"${rpm_filename}\""; $RPMBUILD --rebuild "${rpm_filename}"; exit 0;; esac } # Let the test framework override functions and variables. [ -n "$MC_TEST_RPM_REWRITE" ] && . "$MC_TEST_RPM_REWRITE" AllTAGS=`mcrpmfs_getAllNeededTags "$1"` umask 077 case "${param}" in list) mcrpmfs_list; exit 0;; copyout) mcrpmfs_copyout "$1" "$2"; exit 0;; run) mcrpmfs_run "$1"; exit 1;; esac exit 1 PK ! 
TGZ{ changesetfsnu ȯ�� #!/bin/sh LANG=C export LANG LC_TIME=C export LC_TIME # --- GIT ----------------------------------------------------------------------- found_git_dir() { work_dir=$1 while [ -n "$work_dir" -a "$work_dir" != "/" ]; do [ -d "${work_dir}/.git" ] && { echo "${work_dir}/.git/" return } work_dir=`dirname "$work_dir"` done echo '' } changesetfs_list_git() { WORK_DIR=$1; shift fname=$1; shift USER=$1; shift DATE=$1; shift GIT_DIR=`found_git_dir "$WORK_DIR"` [ -z "$GIT_DIR" ] && GIT_DIR=$WORK_DIR curr_year=`date +"%Y"` git --git-dir="$GIT_DIR" log --abbrev=7 --pretty="format:%at %h %an" -- "$fname" | while read TIMESTAMP chset author do year=`date -d @"$TIMESTAMP" +"%Y"` [ "$year" = "$curr_year" ] && { DATE=`date -d @"$TIMESTAMP" +"%b %d %H:%M"` } || { DATE=`date -d @"$TIMESTAMP" +"%b %d %Y"` } NAME="$chset $author" echo "-rw-rw-rw- 1 $USER 0 0 $DATE $NAME `basename $fname`" done } changesetfs_copyout_git() { WORK_DIR=$1; shift fname=$1; shift orig_fname=$1;shift output_fname=$1;shift chset=`echo "$orig_fname"| cut -f 1 -d " "` GIT_DIR=`found_git_dir "$WORK_DIR"` [ -z "$GIT_DIR" ] && GIT_DIR=$WORK_DIR filecommit=`git --git-dir="$GIT_DIR" show --raw --pretty=tformat:%h "$chset" -- "$fname"| \ tail -n1 | \ sed 's@^::[0-9]*\s*[0-9]*\s*[0-9]*\s*@@' | \ sed 's@^:[0-9]*\s*[0-9]*\s*@@' | \ cut -d'.' -f 1` git --git-dir="$GIT_DIR" show "$filecommit" > "$output_fname" } # --- COMMON -------------------------------------------------------------------- changesetfs_list() { VCS_type=$1; shift WORK_DIR=$1; shift fname=$1; shift DATE=`date +"%b %d %H:%M"` USER=`whoami` case "$VCS_type" in git) changesetfs_list_git "$WORK_DIR" "$fname" "$USER" "$DATE" ;; esac } changesetfs_copyout() { VCS_type=$1; shift WORK_DIR=$1; shift fname=$1; shift case "$VCS_type" in git) changesetfs_copyout_git "$WORK_DIR" "$fname" "$@" ;; esac } # --- MAIN ---------------------------------------------------------------------- command=$1; shift tmp_file=$1; shift WORK_DIR=`head -n1 $tmp_file` fname=`tail -n2 $tmp_file | head -n1` VCS_type=`tail -n1 $tmp_file` case "$command" in list) changesetfs_list "$VCS_type" "$WORK_DIR" "$fname" ;; copyout) changesetfs_copyout "$VCS_type" "$WORK_DIR" "$fname" "$@" ;; *) exit 1 ;; esac exit 0 PK ! �nD , , patchfsnu ȯ�� #! /usr/bin/perl -w # # Written by Adam Byrtek <alpha@debian.org>, 2002 # Rewritten by David Sterba <dave@jikos.cz>, 2009 # # Extfs to handle patches in context and unified diff format. # Known issues: When name of file to patch is modified during editing, # hunk is duplicated on copyin. It is unavoidable. use bytes; use strict; use POSIX; use File::Temp 'tempfile'; # standard binaries my $lzip = 'lzip'; my $lz4 = 'lz4'; my $lzma = 'lzma'; my $xz = 'xz'; my $zstd = 'zstd'; my $bzip = 'bzip2'; my $gzip = 'gzip'; my $fileutil = 'file -b'; # date parsing requires Date::Parse from TimeDate module my $parsedates = eval 'require Date::Parse'; # regular expressions my $unified_header=qr/^--- .*\t.*\n\+\+\+ .*\t.*\n$/; my $unified_extract=qr/^--- ([^\t]+).*\n\+\+\+ ([^\t]+)\s*(.*)\n/; my $unified_header2=qr/^--- .*\n\+\+\+ .*\n$/; my $unified_extract2=qr/^--- ([^\s]+).*\n\+\+\+ ([^\s]+)\s*(.*)\n/; my $unified_contents=qr/^([+\-\\ \n]|@@ .* @@)/; my $unified_hunk=qr/@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? 
@@.*\n/; my $context_header=qr/^\*\*\* .*\t.*\n--- .*\t.*\n$/; my $context_extract=qr/^\*\*\* ([^\t]+).*\n--- ([^\t]+)\s*(.*)\n/; my $context_header2=qr/^\*\*\* .*\n--- .*\n$/; my $context_extract2=qr/^\*\*\* ([^\s]+).*\n--- ([^\s]+)\s*(.*)\n/; my $context_contents=qr/^([!+\-\\ \n]|-{3} .* -{4}|\*{3} .* \*{4}|\*{15})/; my $ls_extract_id=qr/^[^\s]+\s+[^\s]+\s+([^\s]+)\s+([^\s]+)/; my $basename=qr|^(.*/)*([^/]+)$|; sub patchfs_canonicalize_path ($) { my ($fname) = @_; $fname =~ s,/+,/,g; $fname =~ s,(^|/)(?:\.?\./)+,$1,; return $fname; } # output unix date in a mc-readable format sub timef { my @time=localtime($_[0]); return sprintf '%02d-%02d-%02d %02d:%02d', $time[4]+1, $time[3], $time[5]+1900, $time[2], $time[1]; } # parse given string as a date and return unix time sub datetime { # in case of problems fall back to 0 in unix time # note: str2time interprets some wrong values (eg. " ") as 'today' if ($parsedates && defined (my $t=str2time($_[0]))) { return timef($t); } return timef(time); } # print message on stderr and exit sub error { print STDERR $_[0], "\n"; exit 1; } # (compressed) input sub myin { my ($qfname)=(quotemeta $_[0]); $_=`$fileutil $qfname`; if (/^'*lz4/) { return "$lz4 -dc $qfname"; } elsif (/^'*lzip/) { return "$lzip -dc $qfname"; } elsif (/^'*lzma/) { return "$lzma -dc $qfname"; } elsif (/^'*xz/) { return "$xz -dc $qfname"; } elsif (/^'*zst/) { return "$zstd -dc $qfname"; } elsif (/^'*bzip/) { return "$bzip -dc $qfname"; } elsif (/^'*gzip/) { return "$gzip -dc $qfname"; } else { return "cat $qfname"; } } # (compressed) output sub myout { my ($qfname,$append)=(quotemeta $_[0],$_[1]); my ($sep) = $append ? '>>' : '>'; $_=`$fileutil $qfname`; if (/^'*lz4/) { return "$lz4 -c $sep $qfname"; } elsif (/^'*lzip/) { return "$lzip -c $sep $qfname"; } elsif (/^'*lzma/) { return "$lzma -c $sep $qfname"; } elsif (/^'*xz/) { return "$xz -c $sep $qfname"; } elsif (/^'*zst/) { return "$zstd -c $sep $qfname"; } elsif (/^'*bzip/) { return "$bzip -c $sep $qfname"; } elsif (/^'*gzip/) { return "$gzip -c $sep $qfname"; } else { return "cat $sep $qfname"; } } # select diff filename conforming with rules found in diff.info sub diff_filename { my ($fsrc,$fdst)= @_; # TODO: can remove these two calls later $fsrc = patchfs_canonicalize_path ($fsrc); $fdst = patchfs_canonicalize_path ($fdst); if (!$fdst && !$fsrc) { error 'Index: not yet implemented'; } elsif (!$fsrc || $fsrc eq '/dev/null') { return ($fdst,'PATCH-CREATE/'); } elsif (!$fdst || $fdst eq '/dev/null') { return ($fsrc,'PATCH-REMOVE/'); } elsif (($fdst eq '/dev/null') && ($fsrc eq '/dev/null')) { error 'Malformed diff, missing a sane filename'; } else { # fewest path name components if ($fdst=~s|/|/|g < $fsrc=~s|/|/|g) { return ($fdst,''); } elsif ($fdst=~s|/|/|g > $fsrc=~s|/|/|g) { return ($fsrc,''); } else { # shorter base name if (($fdst=~/$basename/o,length $2) < ($fsrc=~/$basename/o,length $2)) { return ($fdst,''); } elsif (($fdst=~/$basename/o,length $2) > ($fsrc=~/$basename/o,length $2)) { return ($fsrc,''); } else { # shortest names if (length $fdst < length $fsrc) { return ($fdst,''); } else { return ($fsrc,''); } } } } } # IN: diff "archive" name # IN: file handle for output; STDIN for list, tempfile else # IN: filename to watch (for: copyout, rm), '' for: list # IN: remove the file? # true - ... and print out the rest # false - ie. 
copyout mode, print just the file sub parse($$$$) { my $archive=quotemeta shift; my $fh=shift; my $file=shift; my $rmmod=shift; my ($state,$fsize,$time); my ($f,$fsrc,$fdst,$prefix); my ($unified,$context); my ($skipread, $filetoprint, $filefound); my ($h_add,$h_del,$h_ctx); # hunk line counts my ($h_r1,$h_r2); # hunk ranges my @outsrc; # if desired ... my @outdst; my $line; my %fmap_size=(); my %fmap_time=(); import Date::Parse if ($parsedates && $file eq ''); $line=1; $state=0; $fsize=0; $f=''; $filefound=0; while ($skipread || ($line++,$_=<I>)) { $skipread=0; if($state == 0) { # expecting comments $unified=$context=0; $unified=1 if (/^--- /); $context=1 if (/^\*\*\* /); if (!$unified && !$context) { $filefound=0 if($file ne '' && $filetoprint); # shortcut for rmmod xor filefound # - in rmmod we print if not found # - in copyout (!rmmod) we print if found print $fh $_ if($rmmod != $filefound); next; } if($file eq '' && $filetoprint) { $fmap_size{"$prefix$f"}+=$fsize; $fmap_time{"$prefix$f"}=$time; } # start of new file $_ .=<I>; # steal next line, both formats $line++; if($unified) { if(/$unified_header/o) { ($fsrc,$fdst,$time) = /$unified_extract/o; } elsif(/$unified_header2/o) { ($fsrc,$fdst,$time) = /$unified_extract2/o; } else { error "Can't parse unified diff header"; } } elsif($context) { if(/$context_header/o) { ($fsrc,$fdst,$time) = /$context_extract/o; } elsif(/$context_header2/o) { ($fsrc,$fdst,$time) = /$context_extract2/o; } else { error "Can't parse context diff header"; } } else { error "Unrecognized diff header"; } $fsrc=patchfs_canonicalize_path($fsrc); $fdst=patchfs_canonicalize_path($fdst); if(wantarray) { push @outsrc,$fsrc; push @outdst,$fdst; } ($f,$prefix)=diff_filename($fsrc,$fdst); $filefound=($f eq $file); $f="$f.diff"; $filetoprint=1; $fsize=length; print $fh $_ if($rmmod != $filefound); $state=1; } elsif($state == 1) { # expecting diff hunk headers, end of file or comments if($unified) { my ($a,$b,$c,$d); ($a,$b,$h_r1,$c,$d,$h_r2)=/$unified_hunk/o; if(!defined($a) || !defined($c)) { # hunk header does not come, a comment inside # or maybe a new file, state 0 will decide $skipread=1; $state=0; next; } $fsize+=length; print $fh $_ if($rmmod != $filefound); $h_r1=1 if(!defined($b)); $h_r2=1 if(!defined($d)); $h_add=$h_del=$h_ctx=0; $state=2; } elsif($context) { if(!/$context_contents/o) { $skipread=1; $state=0; next; } print $fh $_ if($rmmod != $filefound); $fsize+=length; } } elsif($state == 2) { # expecting hunk contents if($h_del + $h_ctx == $h_r1 && $h_add + $h_ctx == $h_r2) { # hooray, end of hunk # we optimistically ended with a hunk before but # the line has been read already $skipread=1; $state=1; next; } print $fh $_ if($rmmod != $filefound); $fsize+=length; my ($first)= /^(.)/; if(ord($first) == ord('+')) { $h_add++; } elsif(ord($first) == ord('-')) { $h_del++; } elsif(ord($first) == ord(' ')) { $h_ctx++; } elsif(ord($first) == ord('\\')) { 0; } elsif(ord($first) == ord('@')) { error "Malformed hunk, header came too early"; } else { error "Unrecognized character in hunk"; } } } if($file eq '' && $filetoprint) { $fmap_size{"$prefix$f"}+=$fsize; $fmap_time{"$prefix$f"}=$time; } # use uid and gid from file my ($uid,$gid)=(`ls -l $archive`=~/$ls_extract_id/o); # flush all file names with cumulative file size while(my ($fn, $fs) = each %fmap_size) { printf $fh "-rw-r--r-- 1 %s %s %d %s %s\n", $uid, $gid, $fs, datetime($fmap_time{$fn}), $fn; } close($fh) if($file ne ''); return \(@outsrc, @outdst) if wantarray; } # list files affected by patch sub list($) { 
parse($_[0], *STDOUT, '', 0); close(I); } # extract diff from patch # IN: diff file to find # IN: output file name sub copyout($$) { my ($file,$out)=@_; $file=~s/^(PATCH-(CREATE|REMOVE)\/)?(.*)\.diff$/$3/; $file = patchfs_canonicalize_path ($file); open(FH, ">$out") or error("Cannot open output file"); parse('', *FH, $file, 0); } # remove diff(s) from patch # IN: archive # IN: file to delete sub rm($$) { my $archive=shift; my ($tmp,$tmpname)=tempfile(); @_=map {scalar(s/^(PATCH-(CREATE|REMOVE)\/)?(.*)\.diff$/$3/,$_)} @_; # just the first file for now parse($archive, $tmp, $_[0], 1); close I; # replace archive system("cat \Q$tmpname\E | " . myout($archive,0))==0 or error "Can't write to archive"; system("rm -f -- \Q$tmpname\E"); } # append diff to archive # IN: diff archive name # IN: newly created file name in archive # IN: the real source file sub copyin($$$) { # TODO: seems to be tricky. what to do? # copyin of file which is already there may: # * delete the original and copy only the new # * just append the new hunks to the same file # problems: may not be a valid diff, unmerged hunks # * try to merge the two together # ... but we do not want write patchutils again, right? error "Copying files into diff not supported"; return; my ($archive,$name,$src)=@_; # in case we are appending another diff, we have # to delete/merge all the files open(DEVNULL, ">/dev/null"); open I, myin($src).'|'; my ($srclist,$dstlist)=parse($archive, *DEVNULL, '', 0); close(I); close(DEVNULL); foreach(@$srclist) { print("SRC: del $_\n"); } foreach(@$dstlist) { print("DST: del $_\n"); } return; # remove overwritten file open I, myin($archive).'|'; rm ($archive, $name); close I; my $cmd1=myin("$src.diff"); my $cmd2=myout($archive,1); system("$cmd1 | $cmd2")==0 or error "Can't write to archive"; } my $fin = $ARGV[1]; # resolve symlink while (-l $fin) { $fin = readlink $fin; } if ($ARGV[0] eq 'list') { open I, myin($fin).'|'; list ($fin); exit 0; } elsif ($ARGV[0] eq 'copyout') { open I, myin($fin)."|"; copyout ($ARGV[2], $ARGV[3]); exit 0; } elsif ($ARGV[0] eq 'rm') { open I, myin($fin)."|"; rm ($fin, $ARGV[2]); exit 0; } elsif ($ARGV[0] eq 'rmdir') { exit 0; } elsif ($ARGV[0] eq 'mkdir') { exit 0; } elsif ($ARGV[0] eq 'copyin') { copyin ($fin, $ARGV[2], $ARGV[3]); exit 0; } exit 1; PK ! ���t t READMEnu �[��� Writing scripts for Midnight Commander's external vfs IMPORTANT NOTE: There may be some bugs left in extfs. Enjoy. Starting with version 3.1, the Midnight Commander comes with so called extfs, which is one of the virtual filesystems. This system makes it possible to create new virtual filesystems for the GNU MC very easily. To handle requests, create a shell/perl/python/etc script/program (with executable permissions) in $(libexecdir)/mc/extfs.d or in ~/.local/share/mc/extfs.d/. (Note: $(libexecdir) should be substituted for actual libexecdir path stored when configured or compiled, like /usr/local/libexec or /usr/libexec). Assign a vfs suffix. For example, if you have .zip file, and would like to see what's inside it, path will be /anypath/my.zip/uzip://some_path/... In this example, .zip is suffix, but I call vfs 'uzip'. Why? Well, what this vfs essentially does is UNzip. UN is too long, so I choosed U. Note that sometime in future filesystem like zip may exist: It will take whole tree and create .zip file from it. So /usr/zip:// will be zipfile containing whole /usr tree. If your vfs does not require file to work on, add '+' to the end of name. 
Note that the trailing '+' in the file name is not part of the vfs name; it is
just a vfs attribute. So you must not use it in vfs commands: 'cd rpms://' is
a correct command, and 'cd rpms+://' is an incorrect one.

* Commands that should be implemented by your shell script
----------------------------------------------------------

Return zero from your script upon completion of the command, otherwise
nonzero for failure or in case of an unsupported command.

$libdir/extfs/prefix command [arguments]

* Command: list archivename

This command should list the complete archive content in the following format
(a slightly modified "ls -l" listing):

AAAAAAA NNN OOOOOOOO GGGGGGGG SSSSSSSS DATETIME [PATH/]FILENAME [-> [PATH/]FILENAME[/]]

where (things in [] are optional):

AAAAAAA  is the permission string like in ls -l
NNN      is the number of links
OOOOOOOO is the owner (either UID or name)
GGGGGGGG is the group (either GID or name)
SSSSSSSS is the file size
FILENAME is the filename
PATH     is the path from the archive's root without the leading slash (/)

DATETIME has one of the following formats:
   Mon DD hh:mm[:ss], Mon DD YYYY, MM-DD-YYYY hh:mm[:ss]

   where Mon is a three-letter English month name, DD is day 01-31 (can be
   1-31 if following Mon), MM is month 01-12, YYYY is the four-digit year,
   hh is hours, mm is minutes, and ss is optional seconds.

If the -> [PATH/]FILENAME part is present, it means:

If permissions start with an l (ell), then it is the name that the symlink
points to. (If this PATH starts with an MC vfs prefix, then it is a symlink
into another virtual filesystem; if you want to specify a path from the local
root, use local:/path_name instead of /path_name, since /path_name means a
path from the root of the listed archive.)

If permissions do not start with l, but the number of links is greater than
one, then this file should be hardlinked with the other file.

The result of the list command must not contain "." and ".." items.

* Command: copyout archivename storedfilename extractto

This should extract from archive archivename the file called storedfilename
(possibly with a path if not located in the archive's root [this is wrong.
current extfs strips paths! -- pavel@ucw.cz]) to the file extractto.

* Command: copyin archivename storedfilename sourcefile

This should add to the archivename the sourcefile with the name
storedfilename inside the archive.

Important note: archivename in the above examples may not have the extension
you are expecting it to have; it may happen that archivename is something
like /tmp/f43513254 or just anything. Some archivers do not like that, so
you'll have to find some workaround.

* Command: rm archivename storedfilename

This should remove storedfilename from archivename.

* Command: mkdir archivename dirname

This should create a new directory called dirname inside archivename.

* Command: rmdir archivename dirname

This should remove an existing directory dirname. If the directory is not
empty, mc will recursively delete it (possibly prompting).

* Command: run

Undocumented :-)

---------------------------------------------------------

Don't forget to mark this file executable (chmod 755 ThisFile, for example).

For the skeleton structure of the executable, look at some of the filesystems
similar to yours.

---------------------------------------------------------

In constructing these routines, errors will be made, and mc will simply not
display a malformed listing line. That can lead the programmer down many
false trails in search of the bug. Since this routine is an executable shell
script, it can be run from the command line independently of mc, and its
output will show on the console or can be redirected to a file.

* Putting it to use
----------------------------------------------------------

The file .mc.ext in a home directory, and in mc's user directory (commonly
/etc/mc), contains instructions for operations on files depending on filename
extensions. It is well documented in other files in this distribution, so
here are just a few notes specifically on the use of the Virtual File System
you just built.

There are entries in .mc.ext defining a few operations that can be done on a
file from an mc panel. Typically they are annotated with a hash mark and a
file extension like this:

# zip

There must be a way to find the file by extension, so the next line does
that. In essence it says "identify the string '.zip' or (|) '.ZIP' at the end
($) of a filename":

regex/\.(zip|ZIP)$

The operations themselves follow that. They must be indented by at least a
space; a tab works as well. In particular, the Open operation will now use
your new virtual file system by cd'ing to it like this:

	Open=%cd zip:%d/%p

This is the line used when a file is highlighted in a panel and the user
presses <Enter> or <Return>. The contents of the archive should show up just
as if they were in a real directory, and can be manipulated as such.

The rest of the entry pertains to the use of the F3 View key:

	View=%view{ascii} unzip -v %f

And perhaps an optional icon for X:

	Icon=zip.xpm

And perhaps an operation to extract the contents of the file, called from a
menu selection:

	Extract=unzip %f '*'

This is just an example. The current entry for .zip files has a menu
selection of 'Unzip' which could be used in place of 'Extract'. What goes
here depends on what items you have in, or add to, the menu system, and
that's another subject.

The sum of this is the .mc.ext entry:

# zip
regex/\.(zip|ZIP)$
	Open=%cd %p/uzip://
	View=%view{ascii} unzip -v %f
	Icon=zip.xpm
	Extract=unzip %f '*'

Add an entry like this to the .mc.ext file in a user's home directory. If you
want others to have it, add it to the mc.ext file in the mc system directory,
often /etc/mc/mc.ext. Notice this file is not prepended with a dot.

Once all this is done, and things are in their proper places, exit mc if you
were using it, and restart it so it picks up the new information. That's all
there is to it.

The hardest part is making a listing function that sorts the output of a
system listing command and turns it into a form that mc can use. Currently
awk (or gawk) is used because nearly all systems have it. If another
scripting language is available, like perl, that could also be used.
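To make the protocol above concrete, here is a deliberately tiny helper in
the same shell style as the bundled scripts. It is only an illustrative
sketch, not anything mc ships: the idea of exposing an ordinary file as a
one-entry archive, the single member name DATA, and the script name
"skeleton" are all assumptions made for this example.

#! /bin/sh
# Minimal extfs helper skeleton (illustrative sketch only):
# presents any single file as an "archive" with one member named DATA.
# mc invokes it as:  <script> command archivename [arguments]

cmd="$1"; shift
archive="$1"; shift

case "$cmd" in
list)
    # One line per member, in the modified "ls -l" format described above.
    size=`wc -c < "$archive"`
    date=`date +"%b %d %H:%M"`
    printf '%s 1 %s %s %s %s DATA\n' '-rw-r--r--' "`id -nu`" "`id -ng`" "$size" "$date"
    ;;
copyout)
    # $1 = stored filename, $2 = local file to extract to
    [ "$1" = "DATA" ] || exit 1
    cat "$archive" > "$2"
    ;;
*)
    # Unsupported commands must return nonzero.
    exit 1
    ;;
esac
exit 0

Saved under a name of your choice in ~/.local/share/mc/extfs.d/ and made
executable, such a script can be exercised from the command line exactly as
described above (for example "./skeleton list ./notes.txt") before its prefix
is wired into mc.ext.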
A?0� patchsetfsnu ȯ�� #!/bin/sh LANG=C export LANG LC_TIME=C export LC_TIME # --- GIT ----------------------------------------------------------------------- found_git_dir() { work_dir=$1 while [ -n "$work_dir" -a "$work_dir" != "/" ]; do [ -d "${work_dir}/.git" ] && { echo "${work_dir}/.git/" return } work_dir=`dirname "$work_dir"` done echo '' } patchsetfs_list_git() { WORK_DIR=$1; shift fname=$1; shift USER=$1; shift DATE=$1; shift GIT_DIR=`found_git_dir "$WORK_DIR"` [ -z "$GIT_DIR" ] && GIT_DIR=$WORK_DIR curr_year=`date +"%Y"` git --git-dir="$GIT_DIR" log --abbrev=7 --pretty="format:%at %h %an" -- "$fname" | while read TIMESTAMP chset author do year=`date -d @"$TIMESTAMP" +"%Y"` [ "$year" = "$curr_year" ] && { DATE=`date -d @"$TIMESTAMP" +"%b %d %H:%M"` } || { DATE=`date -d @"$TIMESTAMP" +"%b %d %Y"` } NAME="$chset $author" echo "-rw-rw-rw- 1 $USER 0 0 $DATE $NAME.diff" done } patchsetfs_copyout_git() { WORK_DIR=$1; shift fname=$1; shift orig_fname=$1;shift output_fname=$1;shift chset=`echo "$orig_fname"| cut -f 1 -d " "` GIT_DIR=`found_git_dir "$WORK_DIR"` [ -z "$GIT_DIR" ] && GIT_DIR=$WORK_DIR git --git-dir="$GIT_DIR" show "$chset" -- "$fname" > "$output_fname" } # --- COMMON -------------------------------------------------------------------- patchsetfs_list() { VCS_type=$1; shift WORK_DIR=$1; shift fname=$1; shift DATE=`date +"%b %d %H:%M"` USER=`whoami` case "$VCS_type" in git) patchsetfs_list_git "$WORK_DIR" "$fname" "$USER" "$DATE" ;; esac } patchsetfs_copyout() { VCS_type=$1; shift WORK_DIR=$1; shift fname=$1; shift case "$VCS_type" in git) patchsetfs_copyout_git "$WORK_DIR" "$fname" "$@" ;; esac } # --- MAIN ---------------------------------------------------------------------- command=$1; shift tmp_file=$1; shift WORK_DIR=`head -n1 $tmp_file` fname=`tail -n2 $tmp_file | head -n1` VCS_type=`tail -n1 $tmp_file` case "$command" in list) patchsetfs_list "$VCS_type" "$WORK_DIR" "$fname" ;; copyout) patchsetfs_copyout "$VCS_type" "$WORK_DIR" "$fname" "$@" ;; *) exit 1 ;; esac exit 0 PK ! ;���� � uacenu ȯ�� #! /bin/sh # # ACE Virtual filesystem executive v0.1 # Works with unace v2.5 # Note: There are two packages for Debian: 'unace' (v1.2b) and # 'unace-nonfree' (v2.x). This script supports 'unace-nonfree' only. # 'unace', which supports only old versions of ACE archives (and is # therefore of little use), uses the pipe character to separate columns # in its listing format. # Copyright (C) 2008 Jacques Pelletier # May be distributed under the terms of the GNU Public License # <jpelletier@ieee.org> # # Define which archiver you are using with appropriate options ACE_LIST=${MC_TEST_EXTFS_LIST_CMD:-"unace l"} ACE_GET="unace x" # ACE_PUT="unace ?" not available # The 'list' command executive # Unace: DD.MM.YY HH:MM packed size ratio file # ls: mc_ace_fs_list() { if [ "x$UID" = "x" ]; then UID=`id -ru 2>/dev/null` if [ "x$UID" = "x" ]; then UID=0 fi fi $ACE_LIST "$1" | awk -v uid=$UID ' /%/ { split($1,date,".") if (date[3] > 50) date[3]=date[3] + 1900 else date[3]=date[3] + 2000 printf "-rw-r--r-- 1 %-8d %-8d %8d %02d-%02d-%04d %s %s\n", uid, 0, $4, date[2], date[1], date[3], $2, $6 }' 2>/dev/null exit 0 } # Command: copyout archivename storedfilename extractto mc_ace_fs_copyout() { $ACE_GET "$1" "$2" > /dev/null 2>&1 mv "$2" "$3" } # The main routine umask 077 cmd="$1" shift case "$cmd" in list) mc_ace_fs_list "$@" ;; copyout) mc_ace_fs_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! ��� u7znu ȯ�� #! 
/bin/sh # # extfs support for p7zip # Written by Pavel Roskin <proski@gnu.org> # Some Bugfixes/workarounds by Sergiy Niskorodov <sgh@mail.zp.ua> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. P7ZIP=`which 7z 2>/dev/null` \ || P7ZIP=`which 7zz 2>/dev/null` \ || P7ZIP=`which 7za 2>/dev/null` \ || P7ZIP=`which 7zr 2>/dev/null` \ || P7ZIP="" # Let the test framework hook in: P7ZIP=${MC_TEST_EXTFS_LIST_CMD:-$P7ZIP} STAT=${MC_TEST_EXTFS_U7Z_STAT:-stat} mcu7zip_list () { # Symlinks are not shown - no idea how to distinguish them # Read-only files are not shown as such - it's rarely useful ugid="`id -nu` `id -ng`" date_re='^\(....\)-\(..\)-\(..\) \(..:..:..\)' # 19 chars. date_mc='\2-\3-\1 \4' empty_date_re='^ \{19\}' size_re='............' # 12 chars. empty_size_re=' \{12\}' zero_size=' 0' # archive entries can have no datetime info, 7z will use archive file datetime date_archive=`$STAT -c %y "$1" 2>/dev/null | sed -n "s/${date_re}.*/${date_mc}/p" 2>/dev/null` [ "${date_archive}"x = x ] && date_archive=`ls -lan "$1" 2>/dev/null | awk '{print $6, $7, $8}' 2>/dev/null` [ "${date_archive}"x = x ] && date_archive="01-01-1970 00:00:00" $P7ZIP l "$1" | sed -n " # If the uncompressed size is missing, we copy the compressed size onto it. # # But first, if the compressed size is missing too, set it to zero: s/^\(.\{19\} [D.]....\) $empty_size_re $empty_size_re/\1 $zero_size $zero_size/ # Next, do the copy: s/^\(.\{19\} [D.]....\) $empty_size_re \($size_re\)/\1 \2 \2/ # # (We use '.\{19\}' as the date may be missing. It may give false positives # but we don't mind: the printing commands that follow use strict patterns.). # Handle directories. s/$date_re D.... $size_re $size_re\(.*\)/drwxr-xr-x 1 $ugid 0 $date_mc \5/p s/$empty_date_re D.... $size_re $size_re\(.*\)/drwxr-xr-x 1 $ugid 0 $date_archive \1/p # Handle normal files. s/$date_re \..... \($size_re\) $size_re\(.*\)/-rw-r--r-- 1 $ugid \5 $date_mc \6/p s/$empty_date_re \..... \($size_re\) $size_re\(.*\)/-rw-r--r-- 1 $ugid \1 $date_archive \2/p " } mcu7zip_copyout () { #first we check if we have old p7zip archive with prefix ./ in filename $P7ZIP l "$1" "$2" | grep -q "0 files, 0 folders" && \ EXFNAME='*./'"$2" || EXFNAME="$2" $P7ZIP e -so "$1" "$EXFNAME" > "$3" 2>/dev/null } mcu7zip_copyin () { $P7ZIP a -si"$2" "$1" <"$3" >/dev/null 2>&1 } mcu7zip_mkdir () { dir=`mktemp -d "${MC_TMPDIR:-/tmp}/mctmpdir-u7z.XXXXXX"` || exit 1 mkdir -p "$dir"/"$2" $P7ZIP a -w"$dir" "$1" "$dir"/"$2" >/dev/null 2>&1 rm -rf "$dir" } mcu7zip_rm () { # NOTE: Version 4.20 fails to delete files in subdirectories #first we check if we have old p7zip archive with prefix ./ in filename $P7ZIP l "$1" "$2" | grep -q "0 files, 0 folders" && \ EXFNAME='*./'"$2" || EXFNAME="$2" $P7ZIP d "$1" "$EXFNAME" 2>&1 | grep -q E_NOTIMPL > /dev/null 2>&1 && \ { printf "Function not implemented...\n7z cannot delete from solid archive." 
>&2 ; exit 1 ; } } mcu7zip_rmdir () { #first we check if we have old p7zip archive with prefix ./ in filename $P7ZIP l "$1" "$2" | grep -q "0 files, 0 folders" && \ EXFNAME='*./'"$2" || EXFNAME="$2" $P7ZIP d "$1" "$EXFNAME"/ 2>&1 | grep -q E_NOTIMPL > /dev/null 2>&1 && \ { printf "Function not implemented...\n7z cannot delete from solid archive." >&2 ; exit 1 ; } } # override any locale for dates LC_DATE=C export LC_DATE umask 077 if [ -z "$P7ZIP" ]; then echo "Error: could not find p7zip (looked for 7z, 7za and 7zr)" >&2 exit 1 fi cmd="$1" shift case "$cmd" in list) mcu7zip_list "$@" | sort -k 8 ;; copyout) mcu7zip_copyout "$@" ;; copyin) mcu7zip_copyin "$@" ;; mkdir) mcu7zip_mkdir "$@" ;; rm) mcu7zip_rm "$@" ;; rmdir) mcu7zip_rmdir "$@" ;; *) exit 1 ;; esac exit 0 PK ! _u?�� � mailfsnu ȯ�� #! /usr/bin/perl -w use bytes; # MC extfs for (possibly compressed) Berkeley style mailbox files # Peter Daum <gator@cs.tu-berlin.de> (Jan 1998, mc-4.1.24) $zcat="zcat"; # gunzip to stdout $bzcat="bzip2 -dc"; # bunzip2 to stdout $lzipcat="lzip -dc"; # unlzip to stdout $lz4cat="lz4 -dc"; # unlz4 to stdout $lzcat="lzma -dc"; # unlzma to stdout $xzcat="xz -dc"; # unxz to stdout $zstdcat="zstd -dc"; # unzstd to stdout $file="file"; # "file" command $TZ='GMT'; # default timezone (for Date module) if (eval "require Date::Parse") { import Date::Parse; $parse_date= sub { local $ftime = str2time($_[0],$TZ); $_ = localtime($ftime); /^(...) (...) ([ \d]\d) (\d\d:\d\d):\d\d (\d\d\d\d)$/; if ($ftime + 6 * 30 * 24 * 60 * 60 < $now || $ftime + 60 * 60 > $now) { return "$2 $3 $5"; } else { return "$2 $3 $4"; } } } elsif (eval "require Date::Manip") { import Date::Manip; $parse_date= sub { return UnixDate($_[0], "%l"); # "ls -l" format } } else { # use "light" version $parse_date= sub { local $mstring='GeeJanFebMarAprMayJunJulAugSepOctNovDec'; # assumes something like: Mon, 5 Jan 1998 16:08:19 +0200 (GMT+0200) # if you have mails with another date format, add it here if (/(\d\d?) ([A-Z][a-z][a-z]) (\d\d\d\d) (\d\d?):(\d\d)/) { $day = $1; $month = $2; $mon = index($mstring,$month) / 3; $year = $3; $hour = $4; $min = $5; # pass time not year for files younger than roughly 6 months # but not for files with dates more than 1-2 hours in the future if ($year * 12 + $mon > $thisyear * 12 + $thismon - 7 && $year * 12 + $mon <= $thisyear * 12 + $thismon && ! (($year * 12 + $mon) * 31 + $day == ($thisyear * 12 + $thismon) * 31 + $thisday && $hour > $thishour + 2)) { return "$month $day $hour:$min"; } else { return "$month $day $year"; } } # Y2K bug. # Date: Mon, 27 Mar 100 16:30:47 +0000 (GMT) if (/(\d\d?) ([A-Z][a-z][a-z]) (1?\d\d) (\d\d?):(\d\d)/) { $day = $1; $month = $2; $mon = index($mstring,$month) / 3; $year = 1900 + $3; $hour = $4; $min = $5; if ($year < 1970) { $year += 100; } if ($year * 12 + $mon > $thisyear * 12 + $thismon - 7 && $year * 12 + $mon <= $thisyear * 12 + $thismon && ! (($year * 12 + $mon) * 31 + $day == ($thisyear * 12 + $thismon) * 31 + $thisday && $hour > $thishour + 2)) { return "$month $day $hour:$min"; } else { return "$month $day $year"; } } # AOLMail(SM). # Date: Sat Jul 01 10:06:06 2000 if (/([A-Z][a-z][a-z]) (\d\d?) (\d\d?):(\d\d)(:\d\d)? (\d\d\d\d)/) { $month = $1; $mon = index($mstring,$month) / 3; $day = $2; $hour = $3; $min = $4; $year = $6; if ($year * 12 + $mon > $thisyear * 12 + $thismon - 7 && $year * 12 + $mon <= $thisyear * 12 + $thismon && ! 
(($year * 12 + $mon) * 31 + $day == ($thisyear * 12 + $thismon) * 31 + $thisday && $hour > $thishour + 2)) { return "$month $day $hour:$min"; } else { return "$month $day $year"; } } # Fallback return $fallback; } } sub process_header { while (<IN>) { $size+=length; s/\r$//; last if /^$/; die "unexpected EOF\n" if eof; if (/^date:\s(.*)$/i) { $date=&$parse_date($1); } elsif (/^subject:\s(.*)$/i) { $subj=lc($1); $subj=~ s/^(re:\s?)+//gi; # no leading Re: $subj=~ tr/a-zA-Z0-9//cd; # strip all "special" characters } elsif (/^from:\s.*?(\w+)\@/i) { $from=$1; } elsif (/^to:\s.*?(\w+)\@/i) { $to=lc($1); } } } sub print_dir_line { $from=$to if ($from eq $user); # otherwise, it would look pretty boring $date=localtime(time) if (!defined $date); printf "-r-------- 1 $< $< %d %s %3.3d_%.25s\n", $size, $date, $msg_nr, "${from}_${subj}"; } sub mailfs_list { my $blank = 1; $user=$ENV{USER}||getlogin||getpwuid($<) || "nobody"; while(<IN>) { s/\r$//; if($blank && /^from\s+\w+(\.\w+)*@/i) { # Start of header print_dir_line unless (!$msg_nr); $size=length; $msg_nr++; ($from,$to,$subj,$date)=("none","none","none", "01-01-80"); process_header; $line=$blank=0; } else { $size+=length; $line++; $blank= /^$/; } } print_dir_line unless (!$msg_nr); exit 0; } sub mailfs_copyout { my($source,$dest)=@_; exit 1 unless (open STDOUT, ">$dest"); ($nr)= ($source =~ /^(\d+)/); # extract message number from "filename" my $blank = 1; while(<IN>) { s/\r$//; if($blank && /^from\s+\w+(\.\w+)*@/i) { $msg_nr++; exit(0) if ($msg_nr > $nr); $blank= 0; } else { $blank= /^$/; } print if ($msg_nr == $nr); } } # main { exit 1 unless ($#ARGV >= 1); $msg_nr=0; $cmd=shift; $mbox_name=shift; my $mbox_qname = quotemeta ($mbox_name); $_=`$file $mbox_qname`; if (/gzip/) { exit 1 unless (open IN, "$zcat $mbox_qname|"); } elsif (/bzip/) { exit 1 unless (open IN, "$bzcat $mbox_qname|"); } elsif (/lzip/) { exit 1 unless (open IN, "$lzipcat $mbox_qname|"); } elsif (/lz4/) { exit 1 unless (open IN, "$lz4cat $mbox_qname|"); } elsif (/lzma/) { exit 1 unless (open IN, "$lzcat $mbox_qname|"); } elsif (/xz/) { exit 1 unless (open IN, "$xzcat $mbox_qname|"); } elsif (/zst/) { exit 1 unless (open IN, "$zstdcat $mbox_qname|"); } else { exit 1 unless (open IN, "<$mbox_name"); } umask 077; if($cmd eq "list") { $now = time; $_ = localtime($now); /^... (... [ \d]\d \d\d:\d\d):\d\d \d\d\d\d$/; $fallback = $1; $nowstring=`date "+%Y %m %d %H"`; ($thisyear, $thismon, $thisday, $thishour) = split(/ /, $nowstring); &mailfs_list; exit 0; } elsif($cmd eq "copyout") { &mailfs_copyout(@ARGV); exit 0; } exit 1; PK ! �k:�� � ualznu ȯ�� #!/bin/sh # # Written by Pavel Roskin <proski@gnu.org> # (C) 2005 The Free Software Foundation. # # UNALZ=unalz mcualz_list () { $UNALZ -l "$1" | awk -v uid=`id -nu` -v gid=`id -ng` ' { if ($1 ~ /[0-9][0-9][:/][0-9][0-9][:/][0-9][0-9]$/) { # Kludge for non-POSIX date format in unalz 0.50 split($1, date, "[/:]") if (length(date[1]) == 4) { pdate = date[2] "/" date[3] "/" date[1] } else { pdate = date[1] "/" date[2] "/" date[3] } time=$2 perm=$3 size=$4 sub(/^ *[^ ]* *[^ ]* *[^ ]* *[^ ]* *[^ ]* */, "") file=$0 gsub(/\\/, "/", file) if (perm ~ /.D../) perm = "drwxr-xr-x" else perm = "-rw-r--r--" printf "%s 1 %s %s %d %s %s %s\n", perm, uid, gid, size, pdate, time, file } } ' } mcualz_copyout () { TMPDIR=`mktemp -d ${MC_TMPDIR:-/tmp}/mctmpdir-ualz.XXXXXX` || exit 1 # This is a workaround for a bug in unalz 0.50 - it crashes if the # output directory is an absolute path. 
dir=`dirname "$TMPDIR/$2"` mkdir -p "$dir" $UNALZ -d "$TMPDIR" "$1" "$2" >/dev/null cat "$TMPDIR/$2" > "$3" rm -rf "$TMPDIR" } # override any locale for dates LC_ALL=C export LC_ALL umask 077 cmd="$1" shift case "$cmd" in list) mcualz_list "$@" ;; copyout) mcualz_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! ��m�; �; uzipnu ȯ�� #! /usr/bin/perl -w # # zip file archive Virtual File System for Midnight Commander # Version 1.4.0 (2001-08-07). # # (C) 2000-2001 Oskar Liljeblad <osk@hem.passagen.se>. # use POSIX; use File::Basename; use strict; # # Configuration options # # Location of the zip program my $app_zip = "/usr/bin/zip"; # Location of the unzip program my $app_unzip = $ENV{MC_TEST_EXTFS_LIST_CMD} || "/usr/bin/unzip"; # Set this to 1 if zipinfo (unzip -Z) is to be used (recommended), otherwise 0. my $op_has_zipinfo = exists($ENV{MC_TEST_EXTFS_HAVE_ZIPINFO}) ? $ENV{MC_TEST_EXTFS_HAVE_ZIPINFO} : 1; # Command used to list archives (zipinfo mode) my $cmd_list_zi = "$app_unzip -Z -l -T"; # Command used to list archives (non-zipinfo mode) my $cmd_list_nzi = "$app_unzip -qq -v"; # Command used to add a file to the archive my $cmd_add = "$app_zip -g"; # Command used to add a link file to the archive (unused) my $cmd_addlink = "$app_zip -g -y"; # Command used to delete a file from the archive my $cmd_delete = "$app_zip -d"; # Command used to extract a file to standard out my $cmd_extract = "$app_unzip -P '' -p"; # -rw-r--r-- 2.2 unx 2891 tx 1435 defN 20000330.211927 ./edit.html # (perm) (?) (?) (size) (?) (zippedsize) (method) (yyyy)(mm)(dd).(HH)(MM)(SS) (fname) my $regex_zipinfo_line = qr"^(\S{7,10})\s+(\d+\.\d+)\s+(\S+)\s+(\d+)\s+(\S\S)\s+(\d+)\s+(\S{4})\s+(\d{4})(\d\d)(\d\d)\.(\d\d)(\d\d)(\d\d)\s(.*)$"; # 2891 Defl:N 1435 50% 03-30-00 21:19 50cbaaf8 ./edit.html # (size) (method) (zippedsize) (zipratio) (mm)-(dd)-(yy|yyyy) (HH):(MM) (cksum) (fname) # or: (yyyy)-(mm)-(dd) my $regex_nonzipinfo_line = qr"^\s*(\d+)\s+(\S+)\s+(\d+)\s+(-?\d+\%)\s+(\d+)-(\d?\d)-(\d+)\s+(\d?\d):(\d\d)\s+([0-9a-f]+)\s\s(.*)$"; # # Main code # die "uzip: missing command and/or archive arguments\n" if ($#ARGV < 1); # Initialization of some global variables my $cmd = shift; my %known = ( './' => 1 ); my %pending = (); my $oldpwd = POSIX::getcwd(); my $archive = shift; my $aarchive = absolutize($archive, $oldpwd); my $cmd_list = ($op_has_zipinfo ? $cmd_list_zi : $cmd_list_nzi); my ($qarchive, $aqarchive) = map (quotemeta, $archive, $aarchive); # Strip all "." and ".." path components from a pathname. sub zipfs_canonicalize_pathname($) { my ($fname) = @_; $fname =~ s,/+,/,g; $fname =~ s,(^|/)(?:\.?\./)+,$1,; return $fname; } # The Midnight Commander never calls this script with archive pathnames # starting with either "./" or "../". Some ZIP files contain such names, # so we need to build a translation table for them. 
my $zipfs_realpathname_table = undef; sub zipfs_realpathname($) { my ($fname) = @_; if (!defined($zipfs_realpathname_table)) { $zipfs_realpathname_table = {}; if (!open(ZIP, "$cmd_list $qarchive |")) { return $fname; } foreach my $line (<ZIP>) { $line =~ s/\r*\n*$//; if ($op_has_zipinfo) { if ($line =~ $regex_zipinfo_line) { my ($fname) = ($14); $zipfs_realpathname_table->{zipfs_canonicalize_pathname($fname)} = $fname; } } else { if ($line =~ $regex_nonzipinfo_line) { my ($fname) = ($11); $zipfs_realpathname_table->{zipfs_canonicalize_pathname($fname)} = $fname; } } } if (!close(ZIP)) { return $fname; } } if (exists($zipfs_realpathname_table->{$fname})) { return $zipfs_realpathname_table->{$fname}; } return $fname; } if ($cmd eq 'list') { &mczipfs_list(@ARGV); } if ($cmd eq 'rm') { &mczipfs_rm(@ARGV); } if ($cmd eq 'rmdir') { &mczipfs_rmdir(@ARGV); } if ($cmd eq 'mkdir') { &mczipfs_mkdir(@ARGV); } if ($cmd eq 'copyin') { &mczipfs_copyin(@ARGV); } if ($cmd eq 'copyout') { &mczipfs_copyout(@ARGV); } if ($cmd eq 'run') { &mczipfs_run(@ARGV); } #if ($cmd eq 'mklink') { &mczipfs_mklink(@ARGV); } # Not supported by MC extfs #if ($cmd eq 'linkout') { &mczipfs_linkout(@ARGV); } # Not supported by MC extfs exit 1; # Remove a file from the archive. sub mczipfs_rm { my ($qfile) = map { &zipquotemeta(zipfs_realpathname($_)) } @_; # "./" at the beginning of pathnames is stripped by Info-ZIP, # so convert it to "[.]/" to prevent stripping. $qfile =~ s/^\\\./[.]/; &checkargs(1, 'archive file', @_); &safesystem("$cmd_delete $qarchive $qfile >/dev/null"); exit; } # Remove an empty directory from the archive. # The only difference from mczipfs_rm is that we append an # additional slash to the directory name to remove. I am not # sure this is absolutely necessary, but it doesn't hurt. sub mczipfs_rmdir { my ($qfile) = map { &zipquotemeta(zipfs_realpathname($_)) } @_; &checkargs(1, 'archive directory', @_); &safesystem("$cmd_delete $qarchive $qfile/ >/dev/null", 12); exit; } # Extract a file from the archive. # Note that we don't need to check if the file is a link, # because mc apparently doesn't call copyout for symbolic links. sub mczipfs_copyout { my ($qafile, $qfsfile) = map { &zipquotemeta(zipfs_realpathname($_)) } @_; &checkargs(1, 'archive file', @_); &checkargs(2, 'local file', @_); &safesystem("$cmd_extract $qarchive $qafile > $qfsfile", 11); exit; } # Add a file to the archive. # This is done by making a temporary directory, in which # we create a symlink the original file (with a new name). # Zip is then run to include the real file in the archive, # with the name of the symbolic link. # Here we also doesn't need to check for symbolic links, # because the mc extfs doesn't allow adding of symbolic # links. sub mczipfs_copyin { my ($afile, $fsfile) = @_; &checkargs(1, 'archive file', @_); &checkargs(2, 'local file', @_); my ($qafile) = quotemeta $afile; $fsfile = &absolutize($fsfile, $oldpwd); my $adir = File::Basename::dirname($afile); my $tmpdir = &mktmpdir(); chdir $tmpdir || &croak("chdir $tmpdir failed"); &mkdirs($adir, 0700); symlink ($fsfile, $afile) || &croak("link $afile failed"); &safesystem("$cmd_add $aqarchive $qafile >/dev/null"); unlink $afile || &croak("unlink $afile failed"); &rmdirs($adir); chdir $oldpwd || &croak("chdir $oldpwd failed"); rmdir $tmpdir || &croak("rmdir $tmpdir failed"); exit; } # Add an empty directory the the archive. # This is similar to mczipfs_copyin, except that we don't need # to use symlinks. 
sub mczipfs_mkdir { my ($dir) = @_; &checkargs(1, 'directory', @_); my ($qdir) = quotemeta $dir; my $tmpdir = &mktmpdir(); chdir $tmpdir || &croak("chdir $tmpdir failed"); &mkdirs($dir, 0700); &safesystem("$cmd_add $aqarchive $qdir >/dev/null"); &rmdirs($dir); chdir $oldpwd || &croak("chdir $oldpwd failed"); rmdir $tmpdir || &croak("rmdir $tmpdir failed"); exit; } # Add a link to the archive. This operation is not used yet, # because it is not supported by the MC extfs. sub mczipfs_mklink { my ($linkdest, $afile) = @_; &checkargs(1, 'link destination', @_); &checkargs(2, 'archive file', @_); my ($qafile) = quotemeta $afile; my $adir = File::Basename::dirname($afile); my $tmpdir = &mktmpdir(); chdir $tmpdir || &croak("chdir $tmpdir failed"); &mkdirs($adir, 0700); symlink ($linkdest, $afile) || &croak("link $afile failed"); &safesystem("$cmd_addlink $aqarchive $qafile >/dev/null"); unlink $afile || &croak("unlink $afile failed"); &rmdirs($adir); chdir $oldpwd || &croak("chdir $oldpwd failed"); rmdir $tmpdir || &croak("rmdir $tmpdir failed"); exit; } # This operation is not used yet, because it is not # supported by the MC extfs. sub mczipfs_linkout { my ($afile, $fsfile) = @_; &checkargs(1, 'archive file', @_); &checkargs(2, 'local file', @_); my ($qafile) = map { &zipquotemeta($_) } $afile; my $linkdest = &get_link_destination($afile); symlink ($linkdest, $fsfile) || &croak("link $fsfile failed"); exit; } # Use unzip to find the link destination of a certain file in the # archive. sub get_link_destination { my ($afile) = @_; my ($qafile) = map { &zipquotemeta($_) } $afile; my $linkdest = safeticks("$cmd_extract $qarchive $qafile"); &croak ("extract failed", "link destination of $afile not found") if (!defined $linkdest || $linkdest eq ''); return $linkdest; } # List files in the archive. # Because mc currently doesn't allow a file's parent directory # to be listed after the file itself, we need to do some # rearranging of the output. Most of this is done in # checked_print_file. sub mczipfs_list { open (PIPE, "$cmd_list $qarchive |") || &croak("$app_unzip failed"); if ($op_has_zipinfo) { while (<PIPE>) { chomp; next if /^Archive:/; next if /^\d+ file/; next if /^Empty zipfile\.$/; my @match = /$regex_zipinfo_line/; next if ($#match != 13); &checked_print_file(@match); } } else { while (<PIPE>) { chomp; my @match = /$regex_nonzipinfo_line/; next if ($#match != 10); # Massage the date. my ($year, $month, $day) = $match[4] > 12 ? ($match[4], $match[5], $match[6]) # 4,5,6 = Y,M,D : ($match[6], $match[4], $match[5]); # 4,5,6 = M,D,Y $year += ($year < 70 ? 2000 : 1900) if $year < 100; # Fix 2-digit year. my @rmatch = ('', '', 'unknown', $match[0], '', $match[2], $match[1], $year, $month, $day, $match[7], $match[8], "00", $match[10]); &checked_print_file(@rmatch); } } if (!close (PIPE)) { &croak("$app_unzip failed") if ($! != 0); &croak("$app_unzip failed", 'non-zero exit status ('.($? >> 8).')') } foreach my $key (sort keys %pending) { foreach my $file (@{ $pending{$key} }) { &print_file(@{ $file }); } } exit; } # Execute a file in the archive, by first extracting it to a # temporary directory. The name of the extracted file will be # the same as the name of it in the archive. 
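# Illustrative invocation (assumption: mc calls this when the user executes a
# file inside the archive):
#   ./uzip run /tmp/a.zip bin/hello.sh
# extracts bin/hello.sh into a private temporary directory, makes it mode 0700,
# runs it from there and removes the temporary copy afterwards.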
sub mczipfs_run { my ($afile) = @_; &checkargs(1, 'archive file', @_); my $qafile = &zipquotemeta(zipfs_realpathname($afile)); my $tmpdir = &mktmpdir(); my $tmpfile = File::Basename::basename($afile); chdir $tmpdir || &croak("chdir $tmpdir failed"); &safesystem("$cmd_extract $aqarchive $qafile > $tmpfile"); chmod 0700, $tmpfile; &safesystem("./$tmpfile"); unlink $tmpfile || &croak("rm $tmpfile failed"); chdir $oldpwd || &croak("chdir $oldpwd failed"); rmdir $tmpdir || &croak("rmdir $tmpdir failed"); exit; } # This is called prior to printing the listing of a file. # A check is done to see if the parent directory of the file has already # been printed or not. If it hasn't, we must cache it (in %pending) and # print it later once the parent directory has been listed. When all # files have been processed, there may still be some that haven't been # printed because their parent directories weren't listed at all. These # files are dealt with in mczipfs_list. sub checked_print_file { my @waiting = ([ @_ ]); while ($#waiting != -1) { my $item = shift @waiting; my $filename = ${$item}[13]; my $dirname = File::Basename::dirname($filename) . '/'; if (exists $known{$dirname}) { &print_file(@{$item}); if ($filename =~ /\/$/) { $known{$filename} = 1; if (exists $pending{$filename}) { push @waiting, @{ $pending{$filename} }; delete $pending{$filename}; } } } else { push @{$pending{$dirname}}, $item; } } } # Print the mc extfs listing of a file from a set of parsed fields. # If the file is a link, we extract it from the zip archive and # include the output as the link destination. Because this output # is not newline terminated, we must execute unzip once for each # link file encountered. sub print_file { my ($perms,$zipver,$platform,$realsize,$format,$cmpsize,$comp,$year,$mon,$day,$hours,$mins,$secs,$filename) = @_; if ($platform ne 'unx') { $perms = ($filename =~ /\/$/ ? 'drwxr-xr-x' : '-rw-r--r--'); } # adjust abnormal perms on directory if ($platform eq 'unx' && $filename =~ /\/$/ && $perms =~ /^\?(.*)$/) { $perms = 'd'.$1; } printf "%-10s 1 %-8d %-8d %8s %s/%s/%s %s:%s:%s ./%s", $perms, $<, $(, $realsize, $mon, $day, $year, $hours, $mins, $secs, $filename; if ($platform eq 'unx' && $perms =~ /^l/) { my $linkdest = &get_link_destination($filename); print " -> $linkdest"; } print "\n"; } # Die with a reasonable error message. sub croak { my ($command, $desc) = @_; die "uzip ($cmd): $command - $desc\n" if (defined $desc); die "uzip ($cmd): $command - $!\n"; } # Make a set of directories, like the command `mkdir -p'. # This subroutine has been tailored for this script, and # because of that, it ignored the directory name '.'. sub mkdirs { my ($dirs, $mode) = @_; $dirs = &cleandirs($dirs); return if ($dirs eq '.'); my $newpos = -1; while (($newpos = index($dirs, '/', $newpos+1)) != -1) { my $dir = substr($dirs, 0, $newpos); mkdir ($dir, $mode) || &croak("mkdir $dir failed"); } mkdir ($dirs, $mode) || &croak("mkdir $dirs failed"); } # Remove a set of directories, failing if the directories # contain other files. # This subroutine has been tailored for this script, and # because of that, it ignored the directory name '.'. sub rmdirs { my ($dirs) = @_; $dirs = &cleandirs($dirs); return if ($dirs eq '.'); rmdir $dirs || &croak("rmdir $dirs failed"); my $newpos = length($dirs); while (($newpos = rindex($dirs, '/', $newpos-1)) != -1) { my $dir = substr($dirs, 0, $newpos); rmdir $dir || &croak("rmdir $dir failed"); } } # Return a semi-canonical directory name. 
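# For example (illustrative): cleandirs("a//b/c/") returns "a/b/c" and
# cleandirs("x/") returns "x"; duplicate and trailing slashes are dropped.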
sub cleandirs { my ($dir) = @_; $dir =~ s:/+:/:g; $dir =~ s:/*$::; return $dir; } # Make a temporary directory with mode 0700. sub mktmpdir { use File::Temp qw(mkdtemp); my $template = "/tmp/mcuzipfs.XXXXXX"; $template="$ENV{MC_TMPDIR}/mcuzipfs.XXXXXX" if ($ENV{MC_TMPDIR}); return mkdtemp($template); } # Make a filename absolute and return it. sub absolutize { my ($file, $pwd) = @_; return "$pwd/$file" if ($file !~ /^\//); return $file; } # Like the system built-in function, but with error checking. # The other argument is an exit status to allow. sub safesystem { my ($command, @allowrc) = @_; my ($desc) = ($command =~ /^([^ ]*) */); $desc = File::Basename::basename($desc); system $command; my $rc = $?; &croak("`$desc' failed") if (($rc & 0xFF) != 0); if ($rc != 0) { $rc = $rc >> 8; foreach my $arc (@allowrc) { return if ($rc == $arc); } &croak("`$desc' failed", "non-zero exit status ($rc)"); } } # Like backticks built-in, but with error checking. sub safeticks { my ($command, @allowrc) = @_; my ($desc) = ($command =~ /^([^ ]*) /); $desc = File::Basename::basename($desc); my $out = `$command`; my $rc = $?; &croak("`$desc' failed") if (($rc & 0xFF) != 0); if ($rc != 0) { $rc = $rc >> 8; foreach my $arc (@allowrc) { return if ($rc == $arc); } &croak("`$desc' failed", "non-zero exit status ($rc)"); } return $out; } # Make sure enough arguments are supplied, or die. sub checkargs { my $count = shift; my $desc = shift; &croak('missing argument', $desc) if ($count-1 > $#_); } # Quote zip wildcard metacharacters. Unfortunately Info-ZIP zip and unzip # on unix interpret some wildcards in filenames, despite the fact that # the shell already does this. Thus this function. sub zipquotemeta { my ($name) = @_; my $out = ''; for (my $c = 0; $c < length $name; $c++) { my $ch = substr($name, $c, 1); $out .= '\\' if (index('*?[]\\', $ch) != -1); $out .= $ch; } return quotemeta($out); } PK ! 3�;�� � rpms+nu ȯ�� #! /usr/bin/perl # # Written by Balazs Nagy (julian7@kva.hu) 1998 # locale bugfix by Michal Svec (rebel@penguin.cz) 2000 # (C) 1998 The Free Software Foundation. # # # override any locale for dates delete $ENV{"LC_ALL"}; $ENV{"LC_TIME"}="C"; #print $ENV{"LC_ALL"}; #exit 0; sub gd { my ($dt) = @_; $dt =~ tr/ //s; $dt =~ s/^\w+ (\w+) (\d+) (\d+:\d+):\d+ .+\n?$/$1 $2 $3/; return $dt; } $DATE=gd(`date`); sub list { my (@rpms, %files, $i, $fn, $dn, $sz, $bt); # @rpms = `rpm -qa --qf "\%{NAME}-\%{VERSION}-\%{RELEASE}:\%{GROUP}:\%{SIZE}:\%{BUILDTIME:date}\n"`; @rpms = `rpm -qa --qf "\%{NAME}-\%{VERSION}:\%{GROUP}:\%{SIZE}:\%{BUILDTIME:date}\n"`; print @trpms; %files = (); %sizes = (); %dates = (); for $i (@rpms) { if ($i =~ /^([^:]+):([^:]+):([^:]+):(.+)$/) { ($fn, $dn, $sz, $bt) = ($1, $2, $3, $4); $dn =~ s/ /_/g; if (defined $files{$dn}) { push(@{$files{$dn}}, $fn); } else { @{$files{$dn}} = ($fn); } $sizes{$fn} = $sz; $dates{$fn} = gd($bt); } } for $i (sort keys %files) { print "dr-xr-xr-x 1 root root 0 $DATE $i/\n"; for $fn (sort @{$files{$i}}) { print "-r--r--r-- 1 root root $sizes{$fn} $dates{$fn} $i/$fn.trpm\n"; } } } #open O, ">>/tmp/tt"; #print O "RPMS: "; #for $i (@ARGV) { # print O "$i "; #} #print O "\n"; #close O; if ($ARGV[0] eq "list") { list(); exit(0); } elsif ($ARGV[0] eq "copyout") { open O, ">$ARGV[3]"; print O $ARGV[2], "\n"; close O; exit(0); } exit(1); PK ! �[�� � README.extfsnu �[��� # Each external VFS type must be registered in extfs.d directory if you want to use it. # Trailing plus means that the filesystem is not tied to a certain file. 
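# Example (assumption, not from the original README): a file-tied filesystem
# such as uzip is entered by pressing Enter on the archive or with a path like
# "cd archive.zip/uzip://", whereas a trailing-plus filesystem such as rpms+
# is entered directly, e.g. "cd rpms://".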
# Popular PC archivers uzip uzoo ulha urar uha u7z ualz # FIXME: for arj usage you need a special patch to unarj (see unarj.diff) uarj uarc uace # For cab files ucab # ar is used for static libraries uar # Packages from popular Linux distributions rpm deb # a+ - mtools filesystem a+ # For browsing lslR listings (found on many ftp sites) lslR # Hewlett Packard calculator hp48+ # Commodore 64/128 d64/D64 files uc1541 # Break patches into chunks patchfs # Represents a mailbox as a directory mailfs # List all installed RPM packages on the system rpms+ trpm # dpkg frontend dpkg+ debd # apt frontend apt+ deba # Simple filesystem for audio cdroms. Use /dev/cdrom#audio (or /#audio) audio # Package of Bad Penguin (an Italian GNU/Linux distribution) bpp # ISO image iso9660 # Amazon S3 s3+ # git frontend gitfs - browse the git repo changesetfs - list of versions of current file patchsetfs - list of patches of current file # Gputils lib archives. ulib # PAK Archive unar PK ! =��X �X uc1541nu ȯ�� #!/usr/bin/env python """ UC1541 Virtual filesystem Author: Roman 'gryf' Dobosz <gryf73@gmail.com> Date: 2019-09-20 Version: 3.3 Licence: BSD source: https://bitbucket.org/gryf/uc1541 mirror: https://github.com/gryf/uc1541 """ import sys import re import os import gzip from subprocess import Popen, PIPE if os.getenv('UC1541_DEBUG'): import logging LOG = logging.getLogger('UC1541') LOG.setLevel(logging.DEBUG) FILE_HANDLER = logging.FileHandler("/tmp/uc1541.log") FILE_FORMATTER = logging.Formatter("%(asctime)s %(levelname)-8s " "%(lineno)s %(funcName)s - %(message)s") FILE_HANDLER.setFormatter(FILE_FORMATTER) FILE_HANDLER.setLevel(logging.DEBUG) LOG.addHandler(FILE_HANDLER) else: class LOG(object): """ Dummy logger object. Does nothing. """ @classmethod def debug(*args, **kwargs): pass @classmethod def info(*args, **kwargs): pass @classmethod def warning(*args, **kwargs): pass @classmethod def error(*args, **kwargs): pass @classmethod def critical(*args, **kwargs): pass SECLEN = 256 def _ord(string_or_int): """ Return an int value for the (possible) string passed in argument. This function is for compatibility between python2 and python3, where single element in byte string array is a string or an int respectively. """ try: return ord(string_or_int) except TypeError: return string_or_int def _get_raw(dimage): """ Try to get contents of the D64 image either it's gzip compressed or not. """ raw = None with gzip.open(dimage, 'rb') as fobj: # Although the common approach with gzipped files is to check the # magic number, in this case there is no guarantee that first track # does not contain exactly the same byte sequence as the magic number. # So the only way left is to actually try to uncompress the file. 
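        # (Illustrative note: gzip.open() does not validate the data up front;
        # a "Not a gzipped file" error only surfaces once read() is called,
        # which is why the read below is wrapped in try/except.)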
try: raw = fobj.read() except (IOError, OSError): pass if not raw: with open(dimage, 'rb') as fobj: raw = fobj.read() return raw def _get_implementation(disk): """ Check the file under fname and return right class for creating an object corresponding for the file """ len_map = {822400: D81, # 80 tracks 819200: D81, # 80 tracks, 3200 error bytes 349696: D71, # 70 tracks 351062: D71, # 70 tracks, 1366 error bytes 174848: D64, # usual d64 disc image, 35 tracks, no errors 175531: D64, # 35 track, 683 error bytes 196608: D64, # 40 track, no errors 197376: D64} # 40 track, 768 error bytes if disk[:32].startswith(b'C64'): return # T64 return len_map.get(len(disk))(disk) class Disk(object): """ Represent common disk interface """ CHAR_MAP = {32: ' ', 33: '!', 34: '"', 35: '#', 37: '%', 38: '&', 39: "'", 40: '(', 41: ')', 42: '*', 43: '+', 44: ',', 45: '-', 46: '.', 47: '/', 48: '0', 49: '1', 50: '2', 51: '3', 52: '4', 53: '5', 54: '6', 55: '7', 56: '8', 57: '9', 59: ';', 60: '<', 61: '=', 62: '>', 63: '?', 64: '@', 65: 'a', 66: 'b', 67: 'c', 68: 'd', 69: 'e', 70: 'f', 71: 'g', 72: 'h', 73: 'i', 74: 'j', 75: 'k', 76: 'l', 77: 'm', 78: 'n', 79: 'o', 80: 'p', 81: 'q', 82: 'r', 83: 's', 84: 't', 85: 'u', 86: 'v', 87: 'w', 88: 'x', 89: 'y', 90: 'z', 91: '[', 93: ']', 97: 'A', 98: 'B', 99: 'C', 100: 'D', 101: 'E', 102: 'F', 103: 'G', 104: 'H', 105: 'I', 106: 'J', 107: 'K', 108: 'L', 109: 'M', 110: 'N', 111: 'O', 112: 'P', 113: 'Q', 114: 'R', 115: 'S', 116: 'T', 117: 'U', 118: 'V', 119: 'W', 120: 'X', 121: 'Y', 122: 'Z', 193: 'A', 194: 'B', 195: 'C', 196: 'D', 197: 'E', 198: 'F', 199: 'G', 200: 'H', 201: 'I', 202: 'J', 203: 'K', 204: 'L', 205: 'M', 206: 'N', 207: 'O', 208: 'P', 209: 'Q', 210: 'R', 211: 'S', 212: 'T', 213: 'U', 214: 'V', 215: 'W', 216: 'X', 217: 'Y', 218: 'Z'} FILE_TYPES = {0b000: 'del', 0b001: 'seq', 0b010: 'prg', 0b011: 'usr', 0b100: 'rel'} DIR_TRACK = 18 DIR_SECTOR = 1 def __init__(self, raw): """ Init """ self.raw = raw self.current_sector_data = None self.next_sector = 0 self.next_track = None self._dir_contents = [] self._already_done = [] def _map_filename(self, string): """ Transcode filename to ASCII compatible. Replace not supported characters with jokers. """ filename = list() for chr_ in string: if _ord(chr_) == 160: # shift+space character; $a0 break character = D64.CHAR_MAP.get(_ord(chr_), '?') filename.append(character) # special cases if filename[0] == "-": filename[0] = "?" 
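        # Illustrative example: PETSCII bytes 0xC1 0xC2 0xC3 map to "ABC",
        # codes absent from CHAR_MAP become "?", and a leading "-" is replaced
        # with the "?" joker (see the special case above).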
LOG.debug("string: ``%s'' mapped to: ``%s''", string, "".join(filename)) return "".join(filename) def _go_to_next_sector(self): """ Fetch (if exist) next sector from a directory chain Return False if the chain ends, True otherwise """ # Well, self.next_sector _should_ have value $FF, but apparently there # are the cases where it is not, therefore checking for that will not # be performed and value of $00 on the next track will end the # directory if self.next_track == 0: LOG.debug("End of directory") return False if self.next_track is None: LOG.debug("Going to the track: %s, %s", self.DIR_TRACK, self.DIR_SECTOR) offset = self._get_offset(self.DIR_TRACK, self.DIR_SECTOR) else: offset = self._get_offset(self.next_track, self.next_sector) LOG.debug("Going to the track: %s,%s", self.next_track, self.next_sector) self.current_sector_data = self.raw[offset:offset + SECLEN] # Guard for reading data out of bound - that happened for discs which # store only raw data, even on directory track if not self.current_sector_data: return False self.next_track = _ord(self.current_sector_data[0]) self.next_sector = _ord(self.current_sector_data[1]) if (self.next_track, self.next_sector) in self._already_done: # Just a failsafe. Endless loop is not what is expected. LOG.debug("Loop in track/sector pointer at %d,%d", self.next_track, self.next_sector) self._already_done = [] return False self._already_done.append((self.next_track, self.next_sector)) LOG.debug("Next track: %s,%s", self.next_track, self.next_sector) return True def _get_ftype(self, num): """ Get filetype as a string """ return D64.FILE_TYPES.get(int("%d%d%d" % (num & 4 and 1, num & 2 and 1, num & 1), 2), '???') def _get_offset(self, track, sector): """ Return offset (in bytes) for specified track and sector. """ return 0 def _harvest_entries(self): """ Traverse through sectors and store entries in _dir_contents """ sector = self.current_sector_data for dummy in range(8): entry = sector[:32] ftype = _ord(entry[2]) if ftype == 0: # deleted sector = sector[32:] continue type_verbose = self._get_ftype(ftype) protect = _ord(entry[2]) & 64 and "<" or " " fname = entry[5:21] if ftype == 'rel': size = _ord(entry[23]) else: size = _ord(entry[30]) + _ord(entry[31]) * 226 self._dir_contents.append({'fname': self._map_filename(fname), 'ftype': type_verbose, 'size': size, 'protect': protect}) sector = sector[32:] def list_dir(self): """ Return directory list as list of dict with keys: fname, ftype, protect and size """ while self._go_to_next_sector(): self._harvest_entries() return self._dir_contents class D64(Disk): """ Implement d64 directory reader """ def _get_offset(self, track, sector): """ Return offset (in bytes) for specified track and sector. Track Sectors/track # Tracks ----- ------------- --------- 1-17 21 17 18-24 19 7 25-30 18 6 31-40 17 10 """ offset = 0 truncate_track = 0 if track > 17: offset = 17 * 21 * SECLEN truncate_track = 17 if track > 24: offset += 7 * 19 * SECLEN truncate_track = 24 if track > 30: offset += 6 * 18 * SECLEN truncate_track = 30 track = track - truncate_track offset += track * sector * SECLEN return offset class D71(Disk): """ Implement d71 directory reader """ def _get_offset(self, track, sector): """ Return offset (in bytes) for specified track and sector. 
Track Sec/trk # Tracks -------------- ------- --------- 1-17 (side 0) 21 17 18-24 (side 0) 19 7 25-30 (side 0) 18 6 31-35 (side 0) 17 5 36-52 (side 1) 21 17 53-59 (side 1) 19 7 60-65 (side 1) 18 6 66-70 (side 1) 17 5 """ offset = 0 truncate_track = 0 if track > 17: offset = 17 * 21 * SECLEN truncate_track = 17 if track > 24: offset += 7 * 19 * SECLEN truncate_track = 24 if track > 30: offset += 6 * 18 * SECLEN truncate_track = 30 if track > 35: offset += 5 * 17 * SECLEN truncate_track = 35 if track > 52: offset = 17 * 21 * SECLEN truncate_track = 17 if track > 59: offset += 7 * 19 * SECLEN truncate_track = 24 if track > 65: offset += 6 * 18 * SECLEN truncate_track = 30 track = track - truncate_track offset += track * sector * SECLEN return offset class D81(Disk): """ Implement d81 directory reader """ DIR_TRACK = 40 DIR_SECTOR = 3 FILE_TYPES = {0b000: 'del', 0b001: 'seq', 0b010: 'prg', 0b011: 'usr', 0b100: 'rel', 0b101: 'cbm'} def _get_offset(self, track, sector): """ Return offset (in bytes) for specified track and sector. In d81 is easy, since we have 80 tracks with 40 sectors for 256 bytes each. """ # we wan to go to the beginning (first sector) of the track, not it's # max, so that we need to extract its amount. return (track * 40 - 40) * SECLEN + sector * SECLEN class Uc1541(object): """ Class for interact with c1541 program and MC """ PRG = re.compile(r'(\d+)\s+"([^"]*)".+?\s(del|prg|rel|seq|usr)([\s<])') def __init__(self, archname): self.arch = archname self.out = '' self.err = '' self._verbose = os.getenv("UC1541_VERBOSE", False) self._hide_del = os.getenv("UC1541_HIDE_DEL", False) self.dirlist = _get_implementation(_get_raw(archname)).list_dir() self.file_map = {} self.directory = [] def list(self): """ Output list contents of D64 image. Convert filenames to be Unix filesystem friendly Add suffix to show user what kind of file do he dealing with. """ LOG.info("List contents of %s", self.arch) directory = self._get_dir() # If there is an error reading directory, show the reason to the user if self.out.startswith("Error"): sys.stderr.write(self.out.split("\n")[0] + "\n") return 2 for entry in directory: sys.stdout.write("%(perms)s 1 %(uid)-8d %(gid)-8d %(size)8d " "Jan 01 1980 %(display_name)s\n" % entry) return 0 def rm(self, dst): """ Remove file from D64 image """ LOG.info("Removing file %s", dst) dst = self._get_masked_fname(dst) if not self._call_command('delete', dst=dst): return self._show_error() return 0 def copyin(self, dst, src): """ Copy file to the D64 image. Destination filename has to be corrected. """ LOG.info("Copy into D64 %s as %s", src, dst) dst = self._correct_fname(dst) if not self._call_command('write', src=src, dst=dst): return self._show_error() return 0 def copyout(self, src, dst): """ Copy file form the D64 image. Source filename has to be corrected, since it's representation differ from the real one inside D64 image. """ LOG.info("Copy form D64 %s as %s", src, dst) if not src.endswith(".prg"): return "cannot read" src = self._get_masked_fname(src) if not self._call_command('read', src=src, dst=dst): return self._show_error() return 0 def mkdir(self, dirname): """Not supported""" self.err = "D64 format doesn't support directories" return self._show_error() def run(self, fname): """Not supported""" self.err = "Not supported, unless you are using MC on real C64 ;)" return self._show_error() def _correct_fname(self, fname): """ Return filename with mapped characters, without .prg extension. 
Characters like $, *, + in filenames are perfectly legal, but c1541 program seem to have issues with it while writing, so it will also be replaced. """ char_map = {'|': "/", "\\": "/", "~": " ", "$": "?", "*": "?"} if fname.lower().endswith(".prg"): fname = fname[:-4] new_fname = [] for char in fname: trans = char_map.get(char) new_fname.append(trans if trans else char) return "".join(new_fname) def _get_masked_fname(self, fname): """ Return masked filename with '?' jokers instead of non ASCII characters, useful for copying or deleting files with c1541. In case of several files with same name exists in directory, only first one will be operative (first as appeared in directory). Warning! If there are two different names but the only difference is in non-ASCII characters (some PET ASCII or control characters) there is a risk that one can remove both files. """ directory = self._get_dir() for entry in directory: if entry['display_name'] == fname: return entry['pattern_name'] def _get_dir(self): """ Retrieve directory via c1541 program """ directory = [] uid = os.getuid() gid = os.getgid() if not self._call_command('list'): return self._show_error() idx = 0 for line in self.out.split("\n"): if Uc1541.PRG.match(line): blocks, fname, ext, rw = Uc1541.PRG.match(line).groups() if ext == 'del' and self._hide_del: continue display_name = ".".join([fname, ext]) pattern_name = self.dirlist[idx]['fname'] if '/' in display_name: display_name = display_name.replace('/', '|') # workaround for space and dash at the beggining of the # filename char_map = {' ': '~', '-': '_'} display_name = "".join([char_map.get(display_name[0], display_name[0]), display_name[1:]]) if ext == 'del': perms = "----------" else: perms = "-r%s-r--r--" % (rw.strip() and "-" or "w") directory.append({'pattern_name': pattern_name, 'display_name': display_name, 'uid': uid, 'gid': gid, 'size': int(blocks) * SECLEN, 'perms': perms}) idx += 1 return directory def _show_error(self): """ Pass out error output from c1541 execution """ if self._verbose: return self.err else: return 1 def _call_command(self, cmd, src=None, dst=None): """ Return status of the provided command, which can be one of: write read delete dir/list """ command = ['c1541', '-attach', self.arch, '-%s' % cmd] if src: command.append(src) if dst: command.append(dst) LOG.debug('executing command: %s', ' '.join(command)) # For some reason using write and delete commands and reading output # confuses Python3 beneath MC and as a consequence MC report an # error...therefore for those commands let's not use # universal_newlines... 
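        # Illustrative example (assumption: the c1541 tool from VICE is on
        # PATH): a copyout of "somefile.prg" ends up running roughly
        #   c1541 -attach game.d64 -read somefile /tmp/somefile.prg
        # while "list" runs "c1541 -attach game.d64 -list" and parses its output.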
universal_newlines = True if cmd in ['delete', 'write']: universal_newlines = False self.out, self.err = Popen(command, universal_newlines=universal_newlines, stdout=PIPE, stderr=PIPE).communicate() if self.err: LOG.debug('an err: %s', self.err) return not self.err CALL_MAP = {'list': lambda a: Uc1541(a.arch).list(), 'copyin': lambda a: Uc1541(a.arch).copyin(a.src, a.dst), 'copyout': lambda a: Uc1541(a.arch).copyout(a.src, a.dst), 'mkdir': lambda a: Uc1541(a.arch).mkdir(a.dst), 'rm': lambda a: Uc1541(a.arch).rm(a.dst), 'run': lambda a: Uc1541(a.arch).run(a.dst)} def parse_args(): """Use ArgumentParser to check for script arguments and execute.""" parser = ArgumentParser() subparsers = parser.add_subparsers(help='supported commands', dest='subcommand') subparsers.required = True parser_list = subparsers.add_parser('list', help="List contents of D64 " "image") parser_copyin = subparsers.add_parser('copyin', help="Copy file into D64 " "image") parser_copyout = subparsers.add_parser('copyout', help="Copy file out of " "D64 image") parser_rm = subparsers.add_parser('rm', help="Delete file from D64 image") parser_mkdir = subparsers.add_parser('mkdir', help="Create directory in " "archive") parser_run = subparsers.add_parser('run', help="Execute archived file") parser_list.add_argument('arch', help="D64 Image filename") parser_list.set_defaults(func=CALL_MAP['list']) parser_copyin.add_argument('arch', help="D64 Image filename") parser_copyin.add_argument('src', help="Source filename") parser_copyin.add_argument('dst', help="Destination filename (to be " "written into D64 image)") parser_copyin.set_defaults(func=CALL_MAP['copyin']) parser_copyout.add_argument('arch', help="D64 Image filename") parser_copyout.add_argument('src', help="Source filename (to be read from" " D64 image") parser_copyout.add_argument('dst', help="Destination filename") parser_copyout.set_defaults(func=CALL_MAP['copyout']) parser_rm.add_argument('arch', help="D64 Image filename") parser_rm.add_argument('dst', help="File inside D64 image to be deleted") parser_rm.set_defaults(func=CALL_MAP['rm']) parser_mkdir.add_argument('arch', help="archive filename") parser_mkdir.add_argument('dst', help="Directory name inside archive to " "be created") parser_mkdir.set_defaults(func=CALL_MAP['mkdir']) parser_run.add_argument('arch', help="archive filename") parser_run.add_argument('dst', help="File to be executed") parser_run.set_defaults(func=CALL_MAP['run']) args = parser.parse_args() return args.func(args) def no_parse(): """Failsafe argument "parsing". Note, that it blindly takes positional arguments without checking them. In case of wrong arguments it will silently exit""" try: if sys.argv[1] not in ('list', 'copyin', 'copyout', 'rm', 'mkdir', "run"): sys.exit(2) except IndexError: sys.exit(2) class Arg(object): """Mimic argparse object""" dst = None src = None arch = None arg = Arg() try: arg.arch = sys.argv[2] if sys.argv[1] in ('copyin', 'copyout'): arg.src = sys.argv[3] arg.dst = sys.argv[4] elif sys.argv[1] in ('rm', 'run', 'mkdir'): arg.dst = sys.argv[3] except IndexError: sys.exit(2) return CALL_MAP[sys.argv[1]](arg) if __name__ == "__main__": LOG.debug("Script params: %s", str(sys.argv)) try: from argparse import ArgumentParser PARSE_FUNC = parse_args except ImportError: PARSE_FUNC = no_parse sys.exit(PARSE_FUNC()) PK ! H�q@ @ iso9660nu ȯ�� #! /bin/sh # Midnight Commander - ISO9660 VFS for MC # based on lslR by Tomas Novak <tnovak@ipex.cz> April 2000 # # Copyright (C) 2000, 2003 # The Free Software Foundation, Inc. 
# # Written by: # Michael Shigorin <mike@altlinux.org>, # Grigory Milev <week@altlinux.org>, # Kachalov Anton <mouse@linux.ru.net>, 2003 # Victor Ananjevsky <ananasik@gmail.com>, 2013 # slava zanko <slavazanko@gmail.com>, 2013 # # This file is part of the Midnight Commander. # # The Midnight Commander is free software: you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation, either version 3 of the License, # or (at your option) any later version. # # The Midnight Commander is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #*** include section (source functions, for example) ******************* #*** file scope functions ********************************************** XORRISO=$(which xorriso 2>/dev/null) xorriso_list() { if test -z "$XORRISO"; then return 1 fi local dir attr ln usr gr sz dt1 dt2 dt3 nm len name lsl r dir="${2:-/}" lsl=$( $XORRISO -abort_on FATAL -dev stdio:"$1" -cd "$dir" -lsl 2> /dev/null ) r=$? test $r -gt 0 && return $r echo "$lsl" | grep "^[-d]" | \ while read attr ln usr gr sz dt1 dt2 dt3 nm ; do len=$((${#nm} - 1)) name=$(printf -- "$nm" | cut -c2-$len) # remove quotes if test $(printf -- "$attr" | cut -c1-1) != "d"; then printf -- "%s %s %s %s %s %s %s %s %s/%s\n" "$attr" "$ln" "$usr" "$gr" "$sz" "$dt1" "$dt2" "$dt3" "$dir" "$name" else xorriso_list "$1" "$dir/$name" fi done } xorriso_copyout() { if test -z "$XORRISO"; then return 1 fi $XORRISO -dev stdio:"$1" -osirrox on -extract "$2" "$3" >/dev/null 2>&1 } xorriso_copyin() { if test -z "$XORRISO"; then return 1 fi $XORRISO -dev stdio:"$1" -cpr "$3" "$2" >/dev/null 2>&1 } xorriso_mkdir() { if test -z "$XORRISO"; then return 1 fi $XORRISO -dev stdio:"$1" -mkdir "$2" >/dev/null 2>&1 } xorriso_rmdir() { if test -z "$XORRISO"; then return 1 fi $XORRISO -dev stdio:"$1" -rmdir "$2" >/dev/null 2>&1 } xorriso_rm() { if test -z "$XORRISO"; then return 1 fi $XORRISO -dev stdio:"$1" -rm "$2" >/dev/null 2>&1 } # tested to comply with isoinfo 2.0's output test_iso () { which isoinfo 2>/dev/null || (echo "isoinfo not found" >&2; return 1) CHARSET=$(locale charmap 2>/dev/null) if test -z "$CHARSET"; then CHARSET=$(locale 2>/dev/null | grep LC_CTYPE | sed -n -e 's/.*\.\(.*\)"$/\1/p') fi if test -n "$CHARSET"; then CHARSET=$(echo "$CHARSET" | tr '[A-Z]' '[a-z]' | sed -e 's/^iso-/iso/') isoinfo -j $CHARSET -i /dev/null 2>&1 | grep "Iconv not yet supported\|Unknown charset" >/dev/null && CHARSET= fi if test -n "$CHARSET"; then JOLIET_OPT="-j $CHARSET -J" else JOLIET_OPT="-J" fi ISOINFO="isoinfo -R" ISOINFO_D_I="$(isoinfo -d -i "$1" 2>/dev/null)" echo "$ISOINFO_D_I" | grep "UCS level 1\|NO Joliet" > /dev/null || ISOINFO="$ISOINFO $JOLIET_OPT" if [ $(echo "$ISOINFO_D_I" | grep "Joliet with UCS level 3 found" | wc -l) = 1 \ -a $(echo "$ISOINFO_D_I" | grep "NO Rock Ridge" | wc -l) = 1 ] ; then SEMICOLON="YES" fi } mcisofs_list () { local lsl r # left as a reminder to implement compressed image support =) case "$1" in *.lz) MYCAT="lzip -dc";; *.lz4) MYCAT="lz4 -dc";; *.lzma) MYCAT="lzma -dc";; *.xz) MYCAT="xz -dc";; *.zst) MYCAT="zstd -dc";; *.bz2) MYCAT="bzip2 -dc";; *.gz) MYCAT="gzip -dc";; *.z) MYCAT="gzip -dc";; *.Z) MYCAT="gzip -dc";; *) 
MYCAT="cat";; esac lsl=$($ISOINFO -l -i "$1" 2>/dev/null) r=$? test $r -gt 0 && return $r echo "$lsl" | awk -v SEMICOLON=$SEMICOLON ' BEGIN { dir=""; # Pattern to match 8 first fields. rx = "[^ ]+[ ]+"; rx = "^" rx rx rx rx rx rx rx rx; irx = "^\\[ *-?[0-9]* *[0-9]+\\] +"; } /^$/ { next } /^d---------/ { next } /^Directory listing of [^ ].*$/ { dir=substr($0, 23); next; } { $11 != "" } { name=$0 sub(rx, "", name) attr=substr($0, 1, length($0)-length(name)) # strip inodes and extra dir entries; fix perms sub(irx, "", name) sub("^---------- 0 0 0", "-r--r--r-- 1 root root", attr) sub(" $", "", name) # for Joliet UCS level 3 if (SEMICOLON == "YES") sub(";1$", "", name); ## sub(";[0-9]+$", "", name) ## would break copyout # skip . and .. if (name == ".") next; if (name == "..") next; printf "%s%s%s\n", attr, dir, name }' } mcisofs_copyout () { if [ "x$SEMICOLON" = "xYES" ]; then $ISOINFO -i "$1" -x "/$2;1" 2>/dev/null > "$3" else $ISOINFO -i "$1" -x "/$2" 2>/dev/null > "$3" fi } #*** main code ********************************************************* LC_ALL=C cmd="$1" shift case "$cmd" in list) xorriso_list "$@" || { test_iso "$@" || exit 1 mcisofs_list "$@" || exit 1 } exit 0 ;; rm) xorriso_rm "$@" || { exit 1 } exit 0 ;; rmdir) xorriso_rmdir "$@" || { exit 1 } exit 0 ;; mkdir) xorriso_mkdir "$@" || { exit 1 } exit 0 ;; copyin) xorriso_copyin "$@" || { exit 1 } exit 0 ;; copyout) xorriso_copyout "$@" || { test_iso "$@" || exit 1 mcisofs_copyout "$@" || exit 1 } exit 0 ;; esac exit 1 PK ! T�\� � urarnu ȯ�� #! /bin/sh # # Written by andrey joukov # (C) 1996 2:5020/337.13@fidonet.org # Updated by christian.gennerat@alcatel.fr 1999 # Andrew V. Samoilov <sav@bcs.zp.ua> 2000 # # Andrew Borodin <aborodin@vmail.ru> # David Haller <dnh@opensuse.org> # 2013: support unrar5 # # beta version 2.0 # # rar and unrar can be found on http://www.rarlabs.com/ RAR=rar # Prefer unrar (freeware). UNRAR=`which unrar 2>/dev/null` [ -z $UNRAR ] && UNRAR=$RAR [ ! -x $UNRAR -a -x $RAR ] && UNRAR=$RAR # Let the test framework hook in: UNRAR=${MC_TEST_EXTFS_LIST_CMD:-$UNRAR} # Determine the $UNRAR version if [ -n "$MC_TEST_EXTFS_UNRAR_VERSION" ]; then # Let the test framework fool us: UNRAR_VERSION=$MC_TEST_EXTFS_UNRAR_VERSION else # Figure it out from rar itself: UNRAR_VERSION=`$UNRAR -cfg- -? 
| grep "Copyright" | sed -e 's/.*\([0-9]\)\..*/\1/'` fi mcrar4fs_list () { $UNRAR v -c- -cfg- "$1" | awk -v uid=`id -u` -v gid=`id -g` ' BEGIN { flag=0 } /^-------/ { flag++; if (flag > 1) exit 0; next } flag==1 { str = substr($0, 2) getline split($4, a, "-") if (index($6, "D") != 0) $6="drwxr-xr-x" else if (index($6, ".") != 0) $6="-rw-r--r--" printf "%s 1 %s %s %d %02d/%02d/%02d %s ./%s\n", $6, uid, gid, $1, a[2], a[1], a[3], $5, str }' } mcrar5fs_list () { $UNRAR vt -c- -cfg- "$1" | awk -F ':' -v uid=`id -u` -v gid=`id -g` ' { ### remove space after the ":" of the field name sub ("^ ", "", $2); } $1 ~ /^ *Name$/ { ### next file name = mtime = size = attrs = ""; delete date; name = $2; ### if the name contains ":", append the rest of the fields if (NF > 2) { for (i = 3; i <= NF; i++) { name = name ":" $i; } } } $1 ~ /^ *mtime$/ { mtime = $2 ":" $3; } $1 ~ /^ *Size$/ { size = $2; } $1 ~ /^ *Attributes$/ { attrs = $2; } $1 ~ /^ *Compression$/ { ### file done, using /^$/ is not so good you ### would have to skip the version stuff first ### get date and time split (mtime, date, " "); time = date[2]; ### cut off seconds from the time sub (",[0-9]*$", "", time); ### split for reordering of the date in the printf below split (date[1], date, "-"); ### mc seems to be able to parse 4 digit years too, so remove if tested # sub ("^..", "", date[1]); ### cut year to 2 digits only ### check/adjust rights if (index (attrs, "D") != 0) { attrs = "drwxr-xr-x"; } else { if (index (attrs, ".") != 0) { attrs = "-rw-r--r--"; } } ### and finally printf ("%s 1 %s %s %d %02d/%02d/%02d %s ./%s\n", attrs, uid, gid, size, date[2], date[3], date[1], time, name); } ' } mcrarfs_list () { [ x$UNRAR_VERSION = x6 -o x$UNRAR_VERSION = x5 ] && mcrar5fs_list "$@" || mcrar4fs_list "$@" } mcrarfs_copyin () { # copyin by christian.gennerat@alcatel.fr # preserve pwd. It is clean, but is it necessary? pwd=`pwd` # Create a directory and copy in it the tmp file with the good name mkdir "$3.dir" cd "$3.dir" di="${2%/*}" # if file is to be written upper in the archive tree, make fake dir if test x"$di" != x"${2##*/}" ; then mkdir -p "$di" fi cp -fp "$3" "$3.dir/$2" $RAR a "$1" "$2" >/dev/null cd "$pwd" rm -rf "$3.dir" } mcrarfs_copyout () { $UNRAR p -p- -c- -cfg- -inul "$1" "$2" > "$3" } mcrarfs_mkdir () { # preserve pwd. It is clean, but is it necessary? pwd=`pwd` # Create a directory and create in it a tmp directory with the good name dir=`mktemp -d "${MC_TMPDIR:-/tmp}/mctmpdir-urar.XXXXXX"` || exit 1 cd "$dir" mkdir -p "$2" # rar cannot create an empty directory touch "$2"/.rarfs $RAR a -r "$1" "$2" >/dev/null $RAR d "$1" "$2/.rarfs" >/dev/null cd "$pwd" rm -rf "$dir" } mcrarfs_rm () { $RAR d "$1" "$2" >/dev/null } umask 077 cmd="$1" shift case "$cmd" in # Workaround for a bug in mc - directories must precede files to # avoid duplicate entries, so we sort output by filenames list) mcrarfs_list "$@" | sort -k 8 ;; rm) mcrarfs_rm "$@" ;; rmdir) mcrarfs_rm "$@" ;; mkdir) mcrarfs_mkdir "$@" ;; copyin) mcrarfs_copyin "$@" ;; copyout) mcrarfs_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! 
���L L hp48+nu ȯ�� #!/bin/sh # # Written by Christofer Edvardsen <ce@earthling.net>, Feb 1998 # # This script makes it possible to view and copy files to/from a hp48 # (tested with a HP48G and the emulator x48) # # To use the hp48 external filesystem: # - read the relevant parts of your HP48 manual # - install kermit # - connect the HP48 to your computer or start x48 # - below change the line which reflects the serial device you use # - configure your HP48 (<left shift> - i/o - iopar): # port: wire # baud: 9600 # transfer format: binary (fast transfers) or # ascii (editable on the pc) # - start the server on the HP48: <left shift> - i/o - srvr - serve # or the shortcut <right shift> - <right arrow> # - on MC's commandline enter "cd hp48://" # # Make sure you have kermit installed and that it's using the right serial # device by changing /dev/ttyXX on the next line AWK=awk KERMIT=${MC_TEST_EXTFS_LIST_CMD:-"kermit -l /dev/ttyS1 -b 9600"} NOW=`date +"%m-%d-%Y %H:%M"` hp48_cmd() { $KERMIT -C "SET EXIT WARNING OFF,REMOTE $1,QUIT" } hp48_cd() { (echo SET EXIT WARNING OFF;echo REMOTE HOST HOME for HP48_DIR in `echo "$1" | tr '/' ' '`;do if [ "x$HP48_DIR" != "x." ];then echo REMOTE HOST "$HP48_DIR"; fi done echo QUIT)| $KERMIT -B >/dev/null } # # Parses the reply to the DIRECTORY command. # # Here's an example reply (taken from [1][2]): # # { HOME } 105617 # STRAY 185.5 Directory 29225 # YEN 30.5 Program 53391 # JYTLIGHT 21848.5 String 62692 # IOPAR 37.5 List 61074 # # The meaning of the fields (according to [3][4]): # # { Current_directory } Free_space # Object_name Object_size_bytes Object_type Object_CRC # ... # # [1] http://newarea48.tripod.com/kermit.html # [2] http://www.hpmuseum.org/forum/thread-4684.html # [3] https://groups.google.com/d/msg/comp.sys.hp48/bYTCu9K3k20/YWQfF--W3EEJ # [4] http://www.columbia.edu/kermit/hp48.html (also has a link to the HP's user manual). # hp48_parser() { HP48_DIRS= read -r INPUT while [ "x$INPUT" != "xEOF" ] do set -- $INPUT obj_name=$1 obj_size=$2 obj_type=$3 obj_size=`echo $obj_size | $AWK '{ print int($0) }'` # Truncates floats to ints; anything else to "0". if [ "$obj_size" != "0" ]; then # Skips the 1st reply line (purportedly there aren't zero-size files b/c, according to resource [4], the size is "including name"). case "$obj_type" in Directory) HP48_DIRS="$HP48_DIRS $obj_name" printf "%crwxr-xr-x 1 %-8d %-8d %8d %s %s\n" 'd' \ 0 0 $obj_size "$NOW" "$HP48_CDIR/$obj_name" ;; *) printf "%crw-r--r-- 1 %-8d %-8d %8d %s %s\n" '-' \ 0 0 $obj_size "$NOW" "$HP48_CDIR/$obj_name" ;; esac fi read -r INPUT done for HP48_DIR in $HP48_DIRS; do HP48_PDIR="$HP48_CDIR" HP48_CDIR="$HP48_CDIR/$HP48_DIR"; hp48_cmd "HOST $HP48_DIR" >/dev/null hp48_list HP48_CDIR="$HP48_PDIR"; hp48_cmd "HOST UPDIR" >/dev/null done } hp48_list() { # It's hard to see why this "EOF" thing is needed. The loop above can be changed to "while read -r obj_name ...". @TODO. { hp48_cmd "DIRECTORY"; echo; echo EOF; } | hp48_parser } # override any locale for dates LC_ALL=C export LC_ALL case "$1" in list) HP48_CDIR= hp48_cmd "HOST HOME" >/dev/null hp48_list exit 0;; copyout) cd "`dirname "$4"`" hp48_cd "`dirname "$3"`" $KERMIT -B -g "`basename "$3"`" -a "$4" >/dev/null exit 0;; copyin) cd "`dirname "$4"`" hp48_cd "`dirname "$3"`" $KERMIT -B -s "$4" -a "`basename "$3"`" >/dev/null exit 0;; esac exit 1 PK ! �p{3 3 s3+nu ȯ�� #! /usr/bin/python # -*- coding: utf-8 -*- # # Midnight Commander compatible EXTFS for accessing Amazon Web Services S3. 
# Written by Jakob Kemi <jakob.kemi@gmail.com> 2009 # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # # Notes: # This EXTFS exposes buckets as directories and keys as files # Due to EXTFS limitations all buckets & keys have to be read initially which might # take quite some time. # Tested on Debian with Python 2.4-2.6 and boto 1.4c and 1.6b # (Python 2.6 might need -W ignore::DeprecationWarning due to boto using # deprecated module Popen2) # # # Installation: # Make sure that boto <http://code.google.com/p/boto> (python-boto in Debian) is installed. # Preferably pytz (package python-tz in Debian) should be installed as well. # # Save as executable file /usr/libexec/mc/extfs/s3 (or wherever your mc expects to find extfs modules) # # Settings: (should be set via environment) # Required: # AWS_ACCESS_KEY_ID : Amazon AWS acces key (required) # AWS_SECRET_ACCESS_KEY : Amazon AWS secret access key (required) # Optional: # MCVFS_EXTFS_S3_LOCATION : where to create new buckets: "EU" - default, "USWest", "APNortheast" etc. # MCVFS_EXTFS_S3_DEBUGFILE : write debug info to this file (no info by default) # MCVFS_EXTFS_S3_DEBUGLEVEL : debug messages level ("WARNING" - default, "DEBUG" - verbose) # # # Usage: # Open dialog "Quick cd" (<alt-c>) and type: s3:// <enter> (or simply type `cd s3://' in shell line) # # # History: # # 2015-07-22 Dmitry Koterov <dmitry.koterov@gmail.com> # - Support for non-ASCII characters in filenames (system encoding detection). # # 2015-05-21 Dmitry Koterov <dmitry.koterov@gmail.com> # - Resolve "Please use AWS4-HMAC-SHA256" error: enforce the new V4 authentication method. # It is required in many (if not all) locations nowadays. # - Now s3+ works with buckets in different regions: locations are auto-detected. # - Debug level specification support (MCVFS_EXTFS_S3_DEBUGLEVEL). # # 2009-02-07 Jakob Kemi <jakob.kemi@gmail.com> # - Updated instructions. # - Improved error reporting. # # 2009-02-06 Jakob Kemi <jakob.kemi@gmail.com> # - Threaded list command. # - Handle rm of empty "subdirectories" (as seen in mc). # - List most recent datetime and total size of keys as directory properties. # - List modification time in local time. # # 2009-02-05 Jakob Kemi <jakob.kemi@gmail.com> # - Initial version. 
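# Example listing line emitted for a key (illustrative; see the "list" handler
# below for the exact format string):
#   -rw-r--r--   1 jakob    jakob    13970 02-07-2009 16:43 mybucket/photos/cat.jpg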
# import sys import os import time import re import datetime import boto from boto.s3.connection import S3Connection from boto.exception import BotoServerError # Get settings from environment USER=os.getenv('USER','0') AWS_ACCESS_KEY_ID=os.getenv('AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY=os.getenv('AWS_SECRET_ACCESS_KEY') S3LOCATION=os.getenv('MCVFS_EXTFS_S3_LOCATION', 'EU') DEBUGFILE=os.getenv('MCVFS_EXTFS_S3_DEBUGFILE') DEBUGLEVEL=os.getenv('MCVFS_EXTFS_S3_DEBUGLEVEL', 'WARNING') if not AWS_ACCESS_KEY_ID or not AWS_SECRET_ACCESS_KEY: sys.stderr.write('Missing AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY environment variables.\n') sys.exit(1) # Setup logging if DEBUGFILE: import logging logging.basicConfig( filename=DEBUGFILE, level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s') logging.getLogger('boto').setLevel(getattr(logging, DEBUGLEVEL)) else: class Void(object): def __getattr__(self, attr): return self def __call__(self, *args, **kw): return self logging = Void() logger = logging.getLogger('s3extfs') def __fix_io_encoding(last_resort_default='UTF-8'): """ The following code is needed to work with non-ASCII characters in filenames. We're trying hard to detect the system encoding. """ import codecs import locale for var in ('stdin', 'stdout', 'stderr'): if getattr(sys, var).encoding is None: enc = None if enc is None: try: enc = locale.getpreferredencoding() except: pass if enc is None: try: enc = sys.getfilesystemencoding() except: pass if enc is None: try: enc = sys.stdout.encoding except: pass if enc is None: enc = last_resort_default setattr(sys, var, codecs.getwriter(enc)(getattr(sys, var), 'strict')) __fix_io_encoding() def threadmap(fun, iterable, maxthreads=16): """ Quick and dirty threaded version of builtin method map. Propagates exception safely. """ from threading import Thread import Queue items = list(iterable) nitems = len(items) if nitems < 2: return map(fun, items) # Create and fill input queue input = Queue.Queue() output = Queue.Queue() for i,item in enumerate(items): input.put( (i,item) ) class WorkThread(Thread): """ Takes one item from input queue (thread terminates when input queue is empty), performs fun, puts result in output queue """ def run(self): while True: try: (i,item) = input.get_nowait() try: result = fun(item) output.put( (i,result) ) except: output.put( (None,sys.exc_info()) ) except Queue.Empty: return # Start threads for i in range( min(len(items), maxthreads) ): t = WorkThread() t.setDaemon(True) t.start() # Wait for all threads to finish & collate results ret = [] for i in range(nitems): try: i,res = output.get() if i == None: raise res[0],res[1],res[2] except Queue.Empty: break ret.append(res) return ret logger.debug('started') if S3LOCATION.upper() == "EU": S3LOCATION = "eu-central-1" if S3LOCATION.upper() == "US": S3LOCATION = "us-east-1" for att in dir(boto.s3.connection.Location): v = getattr(boto.s3.connection.Location, att) if type(v) is str and att.lower() == S3LOCATION.lower(): S3LOCATION = v break logger.debug('Using location %s for new buckets', S3LOCATION) def get_connection(location): """ Creates a connection to the specified region. """ os.environ['S3_USE_SIGV4'] = 'True' # only V4 method is supported in all locations. return boto.s3.connect_to_region( location, aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY ) # Global S3 default connection. s3 = get_connection('us-east-1') def get_bucket(name): """ Returns a bucket by its name, no matter what region is it in. 
""" try: b = s3.get_bucket(name, validate=False) b.get_location() # just to raise an exception on error return b except boto.exception.S3ResponseError, e: # Seems this is the only proper way to switch to the bucket's region. # Requesting of the default region for "?location" does not work unfortunately. m = re.search(r'<Region>(.*?)</Region>', e.body) if m: return get_connection(m.group(1)).get_bucket(name) raise logger.debug('argv: ' + str(sys.argv)) try: cmd = sys.argv[1] args = sys.argv[2:] except: sys.stderr.write('This program should be called from within MC\n') sys.exit(1) def handleServerError(msg): e = sys.exc_info() msg += ', reason: ' + e[1].reason logger.error(msg, exc_info=e) sys.stderr.write(msg+'\n') sys.exit(1) # # Lists all S3 contents # if cmd == 'list': if len(args) > 0: path = args[0] else: path = '' logger.info('list') rs = s3.get_all_buckets() # Import python timezones (pytz) try: import pytz except: logger.warning('Missing pytz module, timestamps will be off') # A fallback UTC tz stub class pytzutc(datetime.tzinfo): def __init__(self): datetime.tzinfo.__init__(self) self.utc = self self.zone = 'UTC' def utcoffset(self, dt): return datetime.timedelta(0) def tzname(self, dt): return "UTC" def dst(self, dt): return datetime.timedelta(0) pytz = pytzutc() # Find timezone # (yes, timeZONE as in _geographic zone_ not EST/CEST or whatever crap we get from time.tzname) # http://regebro.wordpress.com/2008/05/10/python-and-time-zones-part-2-the-beast-returns/ def getGuessedTimezone(): # 1. check TZ env. var try: tz = os.getenv('TZ', '') return pytz.timezone(tz) except: pass # 2. check if /etc/timezone exists (Debian at least) try: if os.path.isfile('/etc/timezone'): tz = open('/etc/timezone', 'r').readline().strip() return pytz.timezone(tz) except: pass # 3. check if /etc/localtime is a _link_ to something useful try: if os.path.islink('/etc/localtime'): link = os.readlink('/etc/localtime') tz = '/'.join(link.split(os.path.sep)[-2:]) return pytz.timezone(tz) except: pass # 4. use time.tzname which will probably be wrong by an hour 50% of the time. try: return pytz.timezone(time.tzname[0]) except: pass # 5. use plain UTC ... return pytz.utc tz=getGuessedTimezone() logger.debug('Using timezone: ' + tz.zone) # AWS time is on format: 2009-01-07T16:43:39.000Z # we "want" MM-DD-YYYY hh:mm (in localtime) expr = re.compile(r'^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})\.\d{3}Z$') def convDate(awsdatetime): m = expr.match(awsdatetime) ye,mo,da,ho,mi,se = map(int,m.groups()) dt = datetime.datetime(ye,mo,da,ho,mi,se, tzinfo=pytz.utc) return dt.astimezone(tz).strftime('%m-%d-%Y %H:%M') def bucketList(b): b = get_bucket(b.name) # get the bucket at its own region totsz = 0 mostrecent = '1970-01-01T00:00:00.000Z' ret = [] for k in b.list(): if k.name.endswith('/'): # Sometimes someone create S3 keys which are ended with "/". # Extfs cannot work with them as with files, and such keys may # hide same-name directories, so we skip them. 
continue mostrecent = max(mostrecent, k.last_modified) datetime = convDate(k.last_modified) ret.append('%10s %3d %-8s %-8s %d %s %s\n' % ( '-rw-r--r--', 1, USER, USER, k.size, datetime, b.name+'/'+k.name) ) totsz += k.size datetime=convDate(mostrecent) sys.stdout.write('%10s %3d %-8s %-8s %d %s %s\n' % ( 'drwxr-xr-x', 1, USER, USER, totsz, datetime, b.name) ) for line in ret: sys.stdout.write(line) threadmap(bucketList, rs) # # Fetch file from S3 # elif cmd == 'copyout': archivename = args[0] storedfilename = args[1] extractto = args[2] bucket,key = storedfilename.split('/', 1) logger.info('copyout bucket: %s, key: %s'%(bucket, key)) try: b = get_bucket(bucket) k = b.get_key(key) out = open(extractto, 'w') k.open(mode='r') for buf in k: out.write(buf) k.close() out.close() except BotoServerError: handleServerError('Unable to fetch key "%s"'%(key)) # # Upload file to S3 # elif cmd == 'copyin': archivename = args[0] storedfilename = args[1] sourcefile = args[2] bucket,key = storedfilename.split('/', 1) logger.info('copyin bucket: %s, key: %s'%(bucket, key)) try: b = get_bucket(bucket) k = b.new_key(key) k.set_contents_from_file(fp=open(sourcefile,'r')) except BotoServerError: handleServerError('Unable to upload key "%s"' % (key)) # # Remove file from S3 # elif cmd == 'rm': archivename = args[0] storedfilename = args[1] bucket,key = storedfilename.split('/', 1) logger.info('rm bucket: %s, key: %s'%(bucket, key)) try: b = get_bucket(bucket) b.delete_key(key) except BotoServerError: handleServerError('Unable to remove key "%s"' % (key)) # # Create directory # elif cmd == 'mkdir': archivename = args[0] dirname = args[1] logger.info('mkdir dir: %s' %(dirname)) if '/' in dirname: logger.warning('skipping mkdir') pass else: bucket = dirname try: get_connection(S3LOCATION).create_bucket(bucket, location=S3LOCATION) except BotoServerError: handleServerError('Unable to create bucket "%s"' % (bucket)) # # Remove directory # elif cmd == 'rmdir': archivename = args[0] dirname = args[1] logger.info('rmdir dir: %s' %(dirname)) if '/' in dirname: logger.warning('skipping rmdir') pass else: bucket = dirname try: b = get_bucket(bucket) b.connection.delete_bucket(b) except BotoServerError: handleServerError('Unable to delete bucket "%s"' % (bucket)) # # Run from S3 # elif cmd == 'run': archivename = args[0] storedfilename = args[1] arguments = args[2:] bucket,key = storedfilename.split('/', 1) logger.info('run bucket: %s, key: %s'%(bucket, key)) os.execv(storedfilename, arguments) else: logger.error('unhandled, bye') sys.exit(1) logger.debug('command handled') sys.exit(0) PK ! �*A� ulhanu ȯ�� #! /bin/sh # # LHa Virtual filesystem executive v0.1 # Copyright (C) 1996, 1997 Joseph M. Hinkle # May be distributed under the terms of the GNU Public License # <jhinkle@rockisland.com> # # Code for mc_lha_fs_run() suggested by: # Jan 97 Zdenek Kabelac <kabi@informatics.muni.cz> # Tested with mc 3.5.18 and gawk 3.0.0 on Linux 2.0.0 # Tested with lha v1.01 and lharc v1.02 # Information and sources for other forms of lha/lzh appreciated # Nota bene: # There are several compression utilities which produce *.lha files. # LHArc and LHa in exist several versions, and their listing output varies. # Another variable is the architecture on which the compressed file was made. # This program attempts to sort out the variables known to me, but it is likely # to display an empty panel if it encounters a mystery. 
# In that case it will be useful to execute this file from the command line: # ./lha list Mystery.lha # to examine the output directly on the console. The output string must be # precisely in the format described in the README in this directory. # Caveat emptor. # Learn Latin. # Define your awk AWK=awk # Define which archiver you are using with appropriate options LHA_LIST="lha lq" LHA_GET="lha pq" LHA_PUT="lha aq" # The 'list' command executive mc_lha_fs_list() { # List the contents of the archive and sort it out $LHA_LIST "$1" | $AWK -v uid=`id -nu` -v gid=`id -ng` ' # Strip a leading '/' if present in a filepath $(NF) ~ /^\// { $(NF) = substr($NF,2) } # Print the line this way if there is no permission string $1 ~ /^\[.*\]/ { # Invent a generic permission $1 = ($NF ~ /\/$/) ? "drwxr-xr-x":"-rwxr--r--"; # Print it printf "%s 1 %-8s %-8s %-8d %s %s %s %s\n", $1, uid, gid, $2, $4, $5, $6, $7; # Get the next line of the list next; } # Do it this way for a defined permission $1 !~ /^\[.*\]/ { # If the permissions and UID run together if ($1 ~ /\//) { $8 = $7; $7 = $6; $6 = $5; $5 = $4; $3 = $2; $2 = substr($1,10); $1 = substr($1,1,9); } # If the permission string is missing a type if (length($1) == 9) { if ($NF ~ /\/$/) $1 = ("d" $1); else $1 = ("-" $1); } # UID:GID might not be the same as on your system so print numbers # Well, that is the intent. At the moment mc is translating them. split($2, id, "/"); printf "%s 1 %-8d %-8d %-8d %s %s %s %s\n", $1, id[1], id[2], $3, $5, $6, $7, $8; # Get the next line of the list next; } ' } # The 'copyout' command executive to copy displayed files to a destination mc_lha_fs_copyout() { $LHA_GET "$1" "$2" > "$3" } # The 'copyin' command executive to add something to the archive mc_lha_fs_copyin () { NAME2=`basename "$2"`; DIR2=${2%$NAME2} NAME3=`basename "$3"`; DIR3=${3%$NAME3} cd "${DIR3}" ONE2=${2%%/*} [ -n "${ONE2}" ] || exit 1 [ -e "${ONE2}" ] && exit 1 [ -e "${DIR2}" ] || mkdir -p "${DIR2}" ln "$3" "$2" || exit 1 $LHA_PUT "$1" "$2" rm -r "${ONE2}" } # The 'run' command executive to run a command from within an archive mc_lha_fs_run() { TMPDIR=`mktemp -d "${MC_TMPDIR:-/tmp}/mctmpdir-ulha.XXXXXX"` || exit 1 trap "rm -rf \"$TMPDIR\"; exit 0" 1 2 3 4 15 TMPCMD=$TMPDIR/run $LHA_GET "$1" "$2" > $TMPCMD chmod a+x "$TMPCMD" "$TMPCMD" rm -rf "$TMPDIR" } # The main routine umask 077 cmd="$1" shift case "$cmd" in list) mc_lha_fs_list "$@" ;; copyout) mc_lha_fs_copyout "$@" ;; copyin) mc_lha_fs_copyin "$@" ;; run) mc_lha_fs_run "$@" ;; *) exit 1 ;; esac exit 0 PK ! =�=�' ' audionu ȯ�� #! /bin/sh # # Written by Pavel Machek # CDDB support by Adam Byrtek # # (C) 2000 The Free Software Foundation. # set -e CDDB_SERVER="http://freedb.freedb.org" CDDB_HANDSHAKE="hello=user+localhost+mc+1.0&proto=1" CDDB_TIMEOUT=20 # in seconds audiofs_list() { DATE=`date +"%b %d %H:%M"` echo "-r--r--r-- 1 0 0 0 $DATE CDDB" cdparanoia -Q -d "$1" 2>&1 | grep '^[ 0-9][ 0-9][ 0-9]\.' 
| while read A B C do A=`echo "$A" | sed -e 's/\.//' -e 's/^\(.\)$/0\1/'` SIZE=`expr 44 + $B \* 2352` echo "-r--r--r-- 1 0 0 $SIZE $DATE track-${A}.wav" done } audiofs_copyout() { if [ x"$2" = x"CDDB" ]; then DISCID=`cd-discid "$1" | tr " " "+"` if [ -z "$DISCID" ]; then exit 1 fi RESPONSE=`wget -q -T $CDDB_TIMEOUT -O - "$CDDB_SERVER/~cddb/cddb.cgi?cmd=cddb+query+$DISCID&$CDDB_HANDSHAKE" | tee "$3" | awk '/^200/ { print $2,$3; }'` wget -q -T $CDDB_TIMEOUT -O - "$CDDB_SERVER/~cddb/cddb.cgi?cmd=cddb+read+$RESPONSE&$CDDB_HANDSHAKE" | grep -v "^#" >> "$3" else TRACK=`echo "$2" | sed 's/track-0*//' | sed 's/\.wav//'` cdparanoia -q -d "$1" "$TRACK" "$3" >/dev/null fi } if [ ! -b "$2" ] then BASE="/dev/cdrom" else BASE="$2" fi case "$1" in list) audiofs_list "$BASE"; exit 0;; copyout) audiofs_copyout "$BASE" "$3" "$4"; exit 0;; esac exit 1 PK ! �7`T� � unarnu ȯ�� #! /bin/sh # Written by Ilia Maslakov <il.smind@gmail.com> # # (C) 2020 The Free Software Foundation. # Define awk AWK=awk # Define which archiver you are using with appropriate options UNAR_LIST="lsar " UNAR_GET="unar " # The 'list' command executive mc_unar_fs_list() { # List the contents of the archive and sort it out $UNAR_LIST -l "$1" | $AWK -v uid=`id -nu` -v gid=`id -ng` ' BEGIN { flag = 0 } /^\(Flags/ {next} /^\(Mode/ {next} { flag++; if (flag < 4) next pr="-r--r--r--" if (index($2, "D") != 0) pr="dr-xr-xr-x" split($6, a, "-") split($7, b, ":") printf "%s 1 %s %s %d %02d/%02d/%02d %02d:%02d %s\n", pr, uid, gid, $3, a[3], a[2], a[1], b[1], b[2], $8 }' } # The 'copyout' command executive to copy displayed files to a destination mc_unar_fs_copyout () { TMPDIR=`mktemp -d "${MC_TMPDIR:-/tmp}/mctmpdir-uha.XXXXXX"` || exit 1 $UNAR_GET "$1" "$2" -o "$TMPDIR" >/dev/null we=`basename "$1" | sed -E 's|^(.*?)\.\w+$|\1|'` cat "$TMPDIR/$we/$2" > "$3" cd / rm -rf "$TMPDIR" } # The main routine umask 077 cmd="$1" shift case "$cmd" in list) mc_unar_fs_list "$@" ;; copyout) mc_unar_fs_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! ~���&# &# dpkg+nu ȯ�� #! 
/usr/bin/perl # # 1999 (c) Piotr Roszatycki <dexter@debian.org> # This software is under GNU license # last modification: 1999-12-08 # # dpkg sub quote { $_ = shift(@_); s/([^\w\/.+-])/\\$1/g; return($_); } sub bt { my ($dt) = @_; my (@time); @time = localtime($dt); $bt = sprintf "%02d-%02d-%d %02d:%02d", $time[4] + 1, $time[3], $time[5] + 1900, $time[2], $time[1]; return $bt; } sub ft { my ($f) = @_; return "d" if -d $f; return "l" if -l $f; return "p" if -p $f; return "S" if -S $f; return "b" if -b $f; return "c" if -c $f; return "-"; } sub fm { my ($n) = @_; my ($m); if( $n & 0400 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0200 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 04000 ) { $m .= "s"; } elsif( $n & 0100 ) { $m .= "x"; } else { $m .= "-"; } if( $n & 0040 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0020 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 02000 ) { $m .= "s"; } elsif( $n & 0010 ) { $m .= "x"; } else { $m .= "-"; } if( $n & 0004 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0002 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 01000 ) { $m .= "t"; } elsif( $n & 0001 ) { $m .= "x"; } else { $m .= "-"; } return $m; } sub ls { my ($file,$path,$mode) = @_; if (-f $file) { my @stat = stat(_); # mode, nlink, uid, gid, size, mtime, filename printf "%s %d %d %d %d %s %s\n", $mode || ft($file).fm($stat[2] & 07777), $stat[3], $stat[4], $stat[5], $stat[7], bt($stat[9]), $path; } } $DATE=bt(time()); sub list { my ($pkg, $fn, $dn, $sz, $bt); my %debs = (); my %sects = (); my($diversions,$architecture); chop($diversions = `dpkg-divert --list 2>/dev/null`); chop($architecture = `dpkg-architecture 2>/dev/null`); chop($list = `dpkg -l '*' 2>/dev/null`); chop($getselections = `dpkg --get-selections 2>/dev/null`); chop($audit = `dpkg --audit 2>/dev/null`); $sz = length($diversions); print "-r--r--r-- 1 root root $sz $DATE DIVERSIONS\n"; $sz = length($architecture); print "-r--r--r-- 1 root root $sz $DATE ARCHITECTURE\n"; $sz = length($list); print "-r--r--r-- 1 root root $sz $DATE LIST\n"; $sz = length($getselections); print "-r--r--r-- 1 root root $sz $DATE GET-SELECTIONS\n"; $sz = length($audit); print "-r--r--r-- 1 root root $sz $DATE AUDIT\n"; $sz = length($pressconfigure); print "-r-xr--r-- 1 root root $sz $DATE CONFIGURE\n"; $sz = length($pressremove); print "-r-xr--r-- 1 root root $sz $DATE REMOVE\n"; $sz = length($pressclearavail); print "-r-xr--r-- 1 root root $sz $DATE CLEAR-AVAIL\n"; $sz = length($pressforgetoldunavail); print "-r-xr--r-- 1 root root $sz $DATE FORGET-OLD-UNAVAIL\n"; ls("/var/lib/dpkg/status","STATUS","-r--r--r--"); # ls("/var/lib/dpkg/available","AVAILABLE","-r--r--r--"); print "drwxr-xr-x 1 root root 0 $DATE all\n"; open STAT, "/var/lib/dpkg/status" or exit 1; while( <STAT> ) { chop; if( /^([\w-]*): (.*)/ ) { $pkg = $2 if( lc($1) eq 'package' ); $debs{$pkg}{lc($1)} = $2; } } close STAT; foreach $pkg (sort keys %debs) { next if $debs{$pkg}{status} =~ /not-installed/; $fn = $debs{$pkg}{package}. "_". $debs{$pkg}{version}; $dn = $debs{$pkg}{section}; if( ! $dn ) { $dn = "unknown"; } elsif( $dn =~ /^(non-us)$/i ) { $dn .= "/main"; } elsif( $dn !~ /\// ) { $dn = "main/". $dn; } unless( $sects{$dn} ) { my $sub = $dn; while( $sub =~ s!^(.*)/[^/]*$!$1! ) { unless( $sects{$sub} ) { print "drwxr-xr-x 1 root root 0 $DATE $sub/\n"; $sects{$sub} = 1; } } print "drwxr-xr-x 1 root root 0 $DATE $dn/\n"; $sects{$dn} = 1; } $sz = $debs{$pkg}{'status'} =~ /config-files/ ? 
0 : $debs{$pkg}{'installed-size'} * 1024; @stat = stat("/var/lib/dpkg/info/".$debs{$pkg}{package}.".list"); $bt = bt($stat[9]); print "-rw-r--r-- 1 root root $sz $bt $dn/$fn.debd\n"; print "lrwxrwxrwx 1 root root $sz $bt all/$fn.debd -> ../$dn/$fn.debd\n"; } } sub copyout { my($archive,$filename) = @_; my $qfilename = quote($filename); if( $archive eq 'DIVERSIONS' ) { system("dpkg-divert --list > $qfilename 2>/dev/null"); } elsif( $archive eq 'ARCHITECTURE' ) { system("dpkg-architecture > $qfilename 2>/dev/null"); } elsif( $archive eq 'LIST' ) { system("dpkg -l '*' > $qfilename 2>/dev/null"); } elsif( $archive eq 'AUDIT' ) { system("dpkg --audit > $qfilename 2>/dev/null"); } elsif( $archive eq 'GET-SELECTIONS' ) { system("dpkg --get-selections > $qfilename 2>/dev/null"); } elsif( $archive eq 'STATUS' ) { system("cp /var/lib/dpkg/status $qfilename"); } elsif( $archive eq 'AVAILABLE' ) { system("cp /var/lib/dpkg/available $qfilename"); } elsif( $archive eq 'CONFIGURE' ) { open O, ">$filename"; print O $pressconfigure; close O; } elsif( $archive eq 'REMOVE' ) { open O, ">$filename"; print O $pressremove; close O; } elsif( $archive eq 'CLEAR-AVAIL' ) { open O, ">$filename"; print O $pressclearavail; close O; } elsif( $archive eq 'FORGET-OLD-UNAVAIL' ) { open O, ">$filename"; print O $pressforgetoldunavail; close O; } else { open O, ">$filename"; print O $archive, "\n"; close O; } } # too noisy but less dangerouse sub copyin { my($archive,$filename) = @_; my $qfilename = quote($filename); if( $archive =~ /\.deb$/ ) { system("dpkg -i $qfilename>/dev/null"); } else { die "extfs: cannot create regular file \`$archive\': Permission denied\n"; } } sub run { my($archive,$filename) = @_; if( $archive eq 'CONFIGURE' ) { system("dpkg --pending --configure"); } elsif( $archive eq 'REMOVE' ) { system("dpkg --pending --remove"); } elsif( $archive eq 'CLEAR-AVAIL' ) { system("dpkg --clear-avail"); } elsif( $archive eq 'FORGET-OLD-UNAVAIL' ) { system("dpkg --forget-old-unavail"); } else { die "extfs: $filename: command not found\n"; } } # Disabled - too dangerous and too noisy sub rm_disabled { my($archive) = @_; if( $archive =~ /\.debd?$/ ) { my $qname = quote($archive); $qname =~ s%.*/%%g; $qname =~ s%_.*%%g; system("if dpkg -s $qname | grep ^Status | grep -qs config-files; \ then dpkg --purge $qname>/dev/null; \ else dpkg --remove $qname>/dev/null; fi"); die("extfs: $archive: Operation not permitted\n") if $? != 0; } else { die "extfs: $archive: Operation not permitted\n"; } } $pressconfigure=<<EOInstall; WARNING Don\'t use this method if you are not willing to configure all non configured packages. This is not a real file. It is a way to configure all non configured packages. To configure packages go back to the panel and press Enter on this file. EOInstall $pressremove=<<EOInstall; WARNING Don\'t use this method if you are not willing to remove all unselected packages. This is not a real file. It is a way to remove all unselected packages. To remove packages go back to the panel and press Enter on this file. EOInstall $pressforgetoldunavail=<<EOInstall; WARNING Don\'t use this method if you are not willing to forget about uninstalled unavailable packages. This is not a real file. It is a way to forget about uninstalled unavailable packages. To forget this information go back to the panel and press Enter on this file. EOInstall $pressclearavail=<<EOInstall; WARNING Don\'t use this method if you are not willing to erase the existing information about what packages are available. This is not a real file. 
It is a way to erase the existing information about what packages are available. To clear this information go back to the panel and press Enter on this file. EOInstall # override any locale for dates $ENV{"LC_ALL"}="C"; if ($ARGV[0] eq "list") { list(); exit(0); } elsif ($ARGV[0] eq "copyout") { copyout($ARGV[2], $ARGV[3]); exit(0); } elsif ($ARGV[0] eq "copyin") { copyin($ARGV[2], $ARGV[3]); exit(0); } elsif ($ARGV[0] eq "run") { run($ARGV[2],$ARGV[3]); exit(0); } #elsif ($ARGV[0] eq "rm") { rm($ARGV[2]); exit(0); } exit(1); PK ! oD�~� � debnu ȯ�� #! /usr/bin/perl # # Written by Fernando Alegre <alegre@debian.org> 1996 # # Applied patch by Dimitri Maziuk <emaziuk@curtin.edu.au> 1997 # (to handle new tar format) # # Modified by Fernando Alegre <alegre@debian.org> 1997 # (to handle both new and old tar formats) # # Modified by Patrik Rak <prak@post.cz> 1998 # (add by Michael Bramer Debian-mc-maintainer <grisu@debian.org>) # (to allow access to package control files) # # Modified by Martin Bialasinski <martinb@debian.org> 1999 # (deal with change in tar format) # # # Copyright (C) 1997 Free Software Foundation # sub quote { $_ = shift(@_); s/([^\w\/.+-])/\\$1/g; return($_); } sub mcdebfs_list { # # CAVEAT: Hard links are listed as if they were symlinks # Empty directories do not appear at all # local($archivename)=@_; local $qarchivename = quote($archivename); chop($date=`LC_ALL=C date "+%b %d %H:%M"`); chop($info_size=`dpkg -I $qarchivename | wc -c`); $install_size=length($pressinstall); print "dr-xr-xr-x 1 root root 0 $date CONTENTS\n"; print "dr-xr-xr-x 1 root root 0 $date DEBIAN\n"; print "-r--r--r-- 1 root root $info_size $date INFO\n"; print "-r-xr--r-- 1 root root $install_size $date INSTALL\n"; if ( open(PIPEIN, "LC_ALL=C dpkg-deb -c $qarchivename |") ) { while(<PIPEIN>) { @_ = split; $perm=$_[0]; $owgr=$_[1]; $size=$_[2]; if($_[3] =~ /^\d\d\d\d\-/) { # New tar format ($year,$mon,$day) = split(/-/,$_[3]); $month = ("Gee","Jan","Feb","Mar","Apr","May","Jun", "Jul","Aug","Sep","Oct","Nov","Dec")[$mon] || "Gee"; $time=$_[4]; $pathindex=5; } else { $mstring='GeeJanFebMarAprMayJunJulAugSepOctNovDec'; $month=$_[3]; $mon=index($mstring,$month) / 3; $day=$_[4]; $time=$_[5]; $year=$_[6]; $pathindex=7; } $path=$_[$pathindex++]; # remove leading ./ $path=~s/^\.\///; next if ($path eq ''); $arrow=$_[$pathindex++]; $link=$_[$pathindex++]; $link2=$_[$pathindex++]; $owgr=~s!/! !; if($arrow eq 'link') { # report hard links as soft links $arrow='->'; $link="/$link2"; substr($perm, 0, 1) = "l"; } if($arrow ne '') { $arrow=' ' . $arrow; $link= ' ' . 
$link; } $now=`date "+%Y %m"`; ($thisyear, $thismon) = split(/ /, $now); # show time for files younger than 6 months # but not for files with dates in the future if ($year * 12 + $mon > $thisyear * 12 + $thismon - 6 && $year * 12 + $mon <= $thisyear * 12 + $thismon) { print "$perm 1 $owgr $size $month $day $time CONTENTS/$path$arrow$link\n"; } else { print "$perm 1 $owgr $size $month $day $year CONTENTS/$path$arrow$link\n"; } } } if ( open(PIPEIN, "LC_ALL=C dpkg-deb -I $qarchivename |") ) { while(<PIPEIN>) { @_ = split; $size=$_[0]; last if $size =~ /:/; next if $size !~ /\d+/; if($_[4] eq '*') { $perm='-r-xr-xr-x'; $name=$_[5]; } else { $perm='-r--r--r--'; $name=$_[4]; } print "$perm 1 root root $size $date DEBIAN/$name\n"; } } } sub mcdebfs_copyout { local($archive,$filename,$destfile)=@_; local $qarchive = quote($archive); local $qfilename = quote($filename); local $qdestfile = quote($destfile); if($filename eq "INFO") { system("dpkg-deb -I $qarchive > $qdestfile"); } elsif($filename =~ /^DEBIAN/) { $qfilename=~s!^DEBIAN/!!; system("dpkg-deb -I $qarchive $qfilename > $qdestfile"); } elsif($filename eq "INSTALL") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $pressinstall; close FILEOUT; system("chmod a+x $qdestfile"); } } else { # files can be prepended with ./ or not, depending on the version of tar $qfilename=~s!^CONTENTS/!!; system("dpkg-deb --fsys-tarfile $qarchive | tar xOf - $qfilename ./$qfilename > $qdestfile 2>/dev/null"); } } sub mcdebfs_run { local($archive,$filename)=@_; local $qarchive = quote($archive); if($filename eq "INSTALL") { print "Installing $archive\n"; system("dpkg -i $qarchive"); } else { use File::Temp qw(mkdtemp); my $template = "/tmp/mcdebfs.run.XXXXXX"; $template="$ENV{MC_TMPDIR}/mcdebfs.XXXXXX" if ($ENV{MC_TMPDIR}); $tmpdir = mkdtemp($template); $tmpcmd="$tmpdir/run"; &mcdebfs_copyout($archive, $filename, $tmpcmd); system("chmod u+x $tmpcmd"); system($tmpcmd); unlink($tmpcmd); rmdir($tmpdir); } } $pressinstall=<<EOInstall; WARNING Don\'t use this method if you are not willing to reinstall everything... This is not a real file. It is a way to install the package you are browsing. To install this package go back to the panel and press Enter on this file. In Debian systems, a package is automatically upgraded when you install a new version of it. There is no special upgrade option. Install always works. EOInstall umask 077; if($ARGV[0] eq "list") { shift; &mcdebfs_list(@ARGV); exit 0; } elsif($ARGV[0] eq "copyout") { shift; &mcdebfs_copyout(@ARGV); exit 0; } elsif($ARGV[0] eq "run") { shift; &mcdebfs_run(@ARGV); exit 0; } exit 1; PK ! �Wp� � a+nu ȯ�� #! 
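# A hand-run sketch of the deb helper above, using the same argument order as
# its dispatch block; the package file name is purely illustrative and dpkg-deb
# must be installed for list/copyout to produce anything:
#   ./deb list ./hello_2.10-2_amd64.deb
#   ./deb copyout ./hello_2.10-2_amd64.deb CONTENTS/usr/bin/hello /tmp/hello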
/usr/bin/perl -w # # External filesystem for mc, using mtools # Written Ludek Brukner <lubr@barco.cz>, 1997 # Much improved by Tom Perkins <968794022@noid.net>, 2000 # # WARNING - This software is ALPHA - Absolutely NO WARRANTY # # These mtools components must be in PATH for this to work sub quote { $_ = shift(@_); s/([^\w\/.+-])/\\$1/g; return($_); } $mmd = "mmd"; $mrd = "mrd"; $mdel = "mdel"; $mdir = "mdir -a"; $mcopy = "mcopy -noQ"; $0 =~ s|.*/||; $qdisk = quote($0); $ENV{MTOOLS_DATE_STRING} = "mm-dd-yyyy"; $ENV{MTOOLS_TWENTY_FOUR_HOUR_CLOCK} = "1"; SWITCH: for ( $ARGV[0] ) { /list/ && do { @dirs = get_dirs(""); while ($dir = shift(@dirs)) { push @dirs, get_dirs("$dir/"); } exit 0; }; /mkdir/ && do { shift; shift; exit 1 if scalar(@ARGV) != 1; $qname = quote($ARGV[0]); system("$mmd $qdisk:/$qname >/dev/null"); exit 0; }; /rmdir/ && do { shift; shift; exit 1 if scalar(@ARGV) != 1; $qname = quote($ARGV[0]); system("$mrd $qdisk:/$qname >/dev/null"); exit 0; }; /rm/ && do { shift; shift; exit 1 if scalar(@ARGV) != 1; $qname = quote($ARGV[0]); system("$mdel $qdisk:/$qname >/dev/null"); exit 0; }; /copyout/ && do { shift; shift; exit 1 if scalar(@ARGV) != 2; ( $qsrc, $qdest ) = @ARGV; $qsrc = quote($qsrc); $qdest = quote($qdest); system("$mcopy $qdisk:/$qsrc $qdest >/dev/null"); exit 0; }; /copyin/ && do { shift; shift; exit 1 if scalar(@ARGV) != 2; ( $qdest, $qsrc ) = @ARGV; $qsrc = quote($qsrc); $qdest = quote($qdest); system("$mcopy $qsrc $qdisk:/$qdest >/dev/null"); exit 0; }; /.*/ && do { # an unfamiliar command exit 1; }; } sub get_dirs { my ($path, $name, $size, $date, $time, $longname, @lst, @rv); $path = shift(@_); my $qpath = quote($path); @rv = (); open(FILE,"$mdir $qdisk:/$qpath |"); while ( <FILE> ) { chomp(); /^ / && next; # ignore `non-file' lines m{^Directory for $0:/}i && next; # ignore `non-file' lines /^$/ && next; # ignore empty lines /^\.\.?/ && next; # ignore `.' and `..' $name = substr($_,0,12); $name =~ s/^([^ ]*) +([^ ]+)[ \t]*$/$1.$2/; $name =~ s/[ .]+$//; $_ = substr($_,12); s/^[ ]+//; ($size,$date,$time,$longname) = split(/[ \t]+/, $_, 4); defined $time || next; # process "am" and "pm". Should not be needed if # MTOOLS_TWENTY_FOUR_HOUR_CLOCK is respected. @lst = split(/([:ap])/, $time); $lst[0] += 12 if (defined $lst[3] && $lst[3] eq "p"); $time = sprintf("%02d:%02d", $lst[0], $lst[2]); @lst = split(/-/, $date); $lst[2] %= 100 if ($lst[2] > 100); $date = sprintf ("%02d-%02d-%02d", @lst); $name = $path . lc(($longname) ? $longname : $name); if ($size =~ /DIR/) { printf("drwxr-xr-x 1 %-8d %-8d %8d %s %s %s\n", 0, 0, 0, $date, $time, $name); push @rv, $name; } else { printf("-rw-r--r-- 1 %-8d %-8d %8d %s %s %s\n", 0, 0, $size, $date, $time, $name); } } close(FILE); return @rv; } 1; PK ! >��S� � trpmnu ȯ�� #! /bin/sh # # Browse contents of an installed RPM package. # This filesystem works on the entries of the "rpms" filesystem. # # Written by Erik Troan (ewt@redhat.com) 1996 # Jakub Jelinek (jj@sunsite.mff.cuni.cz) 1996 # Tomasz K�oczko (kloczek@rudy.mif.pg.gda.pl) 1997 # minor changes by Wojtek Pilorz (wpilorz@bdk.lublin.pl) 1997 # minor changes by Michele Marziani (marziani@fe.infn.it) 1997 # slight changes to put rpm to Trpm by Balazs Nagy (julian7@kva.hu) 1998 # locale bugfix by Michal Svec (rebel@penguin.cz) 2000 # (C) 1996 The Free Software Foundation. 
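# For reference, this helper takes the extfs arguments in the same order as the
# other scripts in this archive: the command, the *.trpm entry produced by the
# rpms listing, then the stored name and destination for copyout. An assumed,
# illustrative session (package name invented for the example):
#   ./trpm list bash-5.1.8-1.trpm
#   ./trpm copyout bash-5.1.8-1.trpm HEADER /tmp/header.txt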
# # # override any locale for dates unset LC_ALL LC_TIME=C export LC_TIME if rpm --nosignature --version >/dev/null 2>&1; then RPM="rpm --nosignature" else RPM="rpm" fi mcrpmfs_list () { # set MCFASTRPM_DFLT to 1 for faster rpm files handling by default, to 0 for # slower handling MCFASTRPM_DFLT=0 if test -z "$MCFASTRPM"; then MCFASTRPM=$MCFASTRPM_DFLT fi FILEPREF="-r--r--r-- 1 root root " DESC=`$RPM -qi -- "$1"` DATE=`$RPM -q --qf "%{BUILDTIME:date}" -- "$1" | cut -c 5-11,21-24` HEADERSIZE=`echo "$DESC" | wc -c` echo "-r--r--r-- 1 root root $HEADERSIZE $DATE HEADER" echo "-r-xr-xr-x 1 root root 40 $DATE UNINSTALL" echo "dr-xr-xr-x 3 root root 0 $DATE INFO" echo "$FILEPREF 0 $DATE INFO/NAME-VERSION-RELEASE" echo "$FILEPREF 0 $DATE INFO/GROUP" echo "$FILEPREF 0 $DATE INFO/BUILDHOST" echo "$FILEPREF 0 $DATE INFO/SOURCERPM" if test "$MCFASTRPM" = 0 ; then test "`$RPM -q --qf \"%{DISTRIBUTION}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/DISTRIBUTION" test "`$RPM -q --qf \"%{VENDOR}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/VENDOR" test "`$RPM -q --qf \"%{DESCRIPTION}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/DESCRIPTION" test "`$RPM -q --qf \"%{SUMMARY}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/SUMMARY" if test "`$RPM -q --qf \"%{RPMTAG_PREIN}%{RPMTAG_POSTIN}%{RPMTAG_PREUN}%{RPMTAG_POSTUN}%{VERIFYSCRIPT}\" -- "$1"`" != "(none)(none)(none)(none)(none)"; then echo "dr-xr-xr-x 1 root root 0 $DATE INFO/SCRIPTS" test "`$RPM -q --qf \"%{RPMTAG_PREIN}\" -- "$1"`" = '(none)' || echo "$FILEPREF 0 $DATE INFO/SCRIPTS/PREIN" test "`$RPM -q --qf \"%{RPMTAG_POSTIN}\" -- "$1"`" = '(none)' || echo "$FILEPREF 0 $DATE INFO/SCRIPTS/POSTIN" test "`$RPM -q --qf \"%{RPMTAG_PREUN}\" -- "$1"`" = '(none)' || echo "$FILEPREF 0 $DATE INFO/SCRIPTS/PREUN" test "`$RPM -q --qf \"%{RPMTAG_POSTUN}\" -- "$1"`" = '(none)' || echo "$FILEPREF 0 $DATE INFO/SCRIPTS/POSTUN" test "`$RPM -q --qf \"%{VERIFYSCRIPT}\" -- "$1"`" = '(none)' || echo "$FILEPREF 0 $DATE INFO/SCRIPTS/VERIFYSCRIPT" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/ALL" fi else echo "$FILEPREF 0 $DATE INFO/DISTRIBUTION" echo "$FILEPREF 0 $DATE INFO/VENDOR" echo "$FILEPREF 0 $DATE INFO/DESCRIPTION" echo "$FILEPREF 0 $DATE INFO/SUMMARY" echo "dr-xr-xr-x 1 root root 0 $DATE INFO/SCRIPTS" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/PREIN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/POSTIN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/PREUN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/POSTUN" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/VERIFYSCRIPT" echo "$FILEPREF 0 $DATE INFO/SCRIPTS/ALL" fi if test "$MCFASTRPM" = 0 ; then test "`$RPM -q --qf \"%{PACKAGER}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/PACKAGER" test "`$RPM -q --qf \"%{URL}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/URL" test "`$RPM -q --qf \"%{EPOCH}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/EPOCH" test "`$RPM -q --qf \"%{LICENSE}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/LICENSE" else echo "$FILEPREF 0 $DATE INFO/PACKAGER" echo "$FILEPREF 0 $DATE INFO/URL" echo "$FILEPREF 0 $DATE INFO/EPOCH" echo "$FILEPREF 0 $DATE INFO/LICENSE" fi echo "$FILEPREF 0 $DATE INFO/BUILDTIME" echo "$FILEPREF 0 $DATE INFO/RPMVERSION" echo "$FILEPREF 0 $DATE INFO/OS" echo "$FILEPREF 0 $DATE INFO/SIZE" if test "$MCFASTRPM" != 0 ; then $RPM -q --qf "[%{REQUIRENAME}\n]" -- "$1" | grep "(none)" > /dev/null || echo "$FILEPREF 0 $DATE INFO/REQUIRENAME" $RPM -q --qf "[%{OBSOLETES}\n]" -- "$1" | grep "(none)" > /dev/null || echo "$FILEPREF 0 $DATE INFO/OBSOLETES" $RPM -q --qf 
"[%{PROVIDES}\n]" -- "$1" | grep "(none)" > /dev/null || echo "$FILEPREF 0 $DATE INFO/PROVIDES" $RPM -q --qf "[%{CONFLICTS}\n]" -- "$1" | grep "(none)" > /dev/null || echo "$FILEPREF 0 $DATE INFO/CONFLICTS" test "`$RPM -q --qf \"%{CHANGELOGTEXT}\" -- "$1"`" = "(none)" || echo "$FILEPREF 0 $DATE INFO/CHANGELOG" else echo "$FILEPREF 0 $DATE INFO/REQUIRENAME" echo "$FILEPREF 0 $DATE INFO/OBSOLETES" echo "$FILEPREF 0 $DATE INFO/PROVIDES" echo "$FILEPREF 0 $DATE INFO/CONFLICTS" echo "$FILEPREF 0 $DATE INFO/CHANGELOG" fi $RPM -qlv -- "$1" | grep '^[A-Za-z0-9-]' } mcrpmfs_copyout () { case "$2" in HEADER) $RPM -qi -- "$1" > "$3"; exit 0;; UNINSTALL) echo "# Run this to uninstall this RPM package" > "$3"; exit 0;; INFO/NAME-VERSION-RELEASE) $RPM -q --qf "%{NAME}-%{VERSION}-%{RELEASE}\n" -- "$1" > "$3"; exit 0;; INFO/RELEASE) $RPM -q --qf "%{RELEASE}\n" -- "$1" > "$3"; exit 0;; INFO/GROUP) $RPM -q --qf "%{GROUP}\n" -- "$1" > "$3"; exit 0;; INFO/DISTRIBUTION) $RPM -q --qf "%{DISTRIBUTION}\n" -- "$1" > "$3"; exit 0;; INFO/VENDOR) $RPM -q --qf "%{VENDOR}\n" -- "$1" > "$3"; exit 0;; INFO/BUILDHOST) $RPM -q --qf "%{BUILDHOST}\n" -- "$1" > "$3"; exit 0;; INFO/SOURCERPM) $RPM -q --qf "%{SOURCERPM}\n" -- "$1" > "$3"; exit 0;; INFO/DESCRIPTION) $RPM -q --qf "%{DESCRIPTION}\n" -- "$1" > "$3"; exit 0;; INFO/PACKAGER) $RPM -q --qf "%{PACKAGER}\n" -- "$1" > "$3"; exit 0;; INFO/URL) $RPM -q --qf "%{URL}\n" -- "$1" > "$3"; exit 0;; INFO/BUILDTIME) $RPM -q --qf "%{BUILDTIME:date}\n" -- "$1" > "$3"; exit 0;; INFO/EPOCH) $RPM -q --qf "%{EPOCH}\n" -- "$1" > "$3"; exit 0;; INFO/LICENSE) $RPM -q --qf "%{LICENSE}\n" -- "$1" > "$3"; exit 0;; INFO/RPMVERSION) $RPM -q --qf "%{RPMVERSION}\n" -- "$1" > "$3"; exit 0;; INFO/REQUIRENAME) $RPM -q --qf "[%{REQUIRENAME} %{REQUIREFLAGS:depflags} %{REQUIREVERSION}\n]" -- "$1" > "$3"; exit 0;; INFO/OBSOLETES) $RPM -q --qf "[%{OBSOLETENAME} %|OBSOLETEFLAGS?{%{OBSOLETEFLAGS:depflags} %{OBSOLETEVERSION}}:{}|\n]" -- "$1" > "$3"; exit 0;; INFO/PROVIDES) $RPM -q --qf "[%{PROVIDES}\n]" -- "$1" > "$3"; exit 0;; INFO/CONFLICTS) $RPM -q --qf "[%{CONFLICTS}\n]" -- "$1" > "$3"; exit 0;; INFO/SCRIPTS/PREIN) $RPM -q --qf "%{RPMTAG_PREIN}\n" -- "$1" > "$3"; exit 0;; INFO/SCRIPTS/POSTIN) $RPM -q --qf "%{RPMTAG_POSTIN}\n" -- "$1" > "$3"; exit 0;; INFO/SCRIPTS/PREUN) $RPM -q --qf "%{RPMTAG_PREUN}\n" -- "$1" > "$3"; exit 0;; INFO/SCRIPTS/POSTUN) $RPM -q --qf "%{RPMTAG_POSTUN}\n" -- "$1" > "$3"; exit 0;; INFO/SCRIPTS/VERIFYSCRIPT) $RPM -q --qf "%{VERIFYSCRIPT}\n" -- "$1" > "$3"; exit 0;; INFO/SCRIPTS/ALL) $RPM -q --scripts -- "$1" > "$3"; exit 0;; INFO/SUMMARY) $RPM -q --qf "%{SUMMARY}\n" -- "$1" > "$3"; exit 0;; INFO/OS) $RPM -q --qf "%{OS}\n" -- "$1" > "$3"; exit 0;; INFO/CHANGELOG) $RPM -q --qf "[* %{CHANGELOGTIME:date} %{CHANGELOGNAME}\n%{CHANGELOGTEXT}\n\n]\n" -- "$1" > "$3"; exit 0;; INFO/SIZE) $RPM -q --qf "%{SIZE} bytes\n" -- "$1" > "$3"; exit 0;; *) cp "/$2" "$3" esac } mcrpmfs_run () { case "$2" in UNINSTALL) echo "Uninstalling $1"; rpm -e -- "$1"; exit 0;; esac } name=`sed 's/.*\///;s/\.trpm$//' "$2"` case "$1" in list) mcrpmfs_list "$name"; exit 0;; copyout) mcrpmfs_copyout "$name" "$3" "$4"; exit 0;; run) mcrpmfs_run "$name" "$3"; exit 1;; esac exit 1 PK ! ��D�� � lslRnu ȯ�� #! /bin/sh # Based on previous version of lslR # Modified by Tomas Novak <tnovak@ipex.cz> April 2000 # (to allow spaces in filenames) # # It's assumed that lslR was generated in C locale. 
LC_ALL=C export LC_ALL=C AWK=awk mclslRfs_list () { case "$1" in *.lz) MYCAT="lzip -dc";; *.lz4) MYCAT="lz4 -dc";; *.lzma) MYCAT="lzma -dc";; *.xz) MYCAT="xz -dc";; *.zst) MYCAT="zstd -dc";; *.bz2) MYCAT="bzip2 -dc";; *.gz) MYCAT="gzip -dc";; *.z) MYCAT="gzip -dc";; *.Z) MYCAT="gzip -dc";; *) MYCAT="cat";; esac MYCAT=${MC_TEST_EXTFS_LIST_CMD:-$MYCAT} # Let the test framework hook in. $MYCAT "$1" | $AWK ' BEGIN { dir=""; empty=1; rx = "[^ ]+[ ]+"; # Pattern to match 7 first fields. rx7 = "^" rx rx rx rx rx rx "[^ ]+[ ]"; # Pattern to match 8 first fields. rx8 = "^" rx rx rx rx rx rx rx "[^ ]+[ ]"; } /^total\ [0-9]*$/ { next } /^$/ { empty=1; next } empty==1 && /:$/ { empty=0 if ($0 ~ /^\//) dir=substr($0, 2); else dir=$0; if (dir ~ /\/:$/) sub(/:$/, "", dir); else sub(/:$/, "/", dir); if (dir ~ /^[ ]/) dir="./"dir; next; } ( NF > 7 ) { empty=0 # gensub() is not portable. name=$0 i=index($6, "-") if (i) { sub(rx7, "", name) NF = 7 $6=substr($6,i+1)"-"substr($6,1,i-1) } else { sub(rx8, "", name) NF = 8 } printf "%s %s%s\n", $0, dir, name } { empty=0 }' } case "$1" in list) mclslRfs_list "$2"; exit 0;; esac exit 1 PK ! �6[��% �% apt+nu ȯ�� #! /usr/bin/perl # # 1999 (c) Piotr Roszatycki <dexter@debian.org> # This software is under GNU license # last modification: 1999-12-08 # # apt sub quote { $_ = shift(@_); s/([^\w\/.+-])/\\$1/g; return($_); } sub bt { my ($dt) = @_; my (@time); @time = localtime($dt); $bt = sprintf "%02d-%02d-%d %02d:%02d", $time[4] + 1, $time[3], $time[5] + 1900, $time[2], $time[1]; return $bt; } sub ft { my ($f) = @_; return "d" if -d $f; return "l" if -l $f; return "p" if -p $f; return "S" if -S $f; return "b" if -b $f; return "c" if -c $f; return "-"; } sub fm { my ($n) = @_; my ($m); if( $n & 0400 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0200 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 04000 ) { $m .= "s"; } elsif( $n & 0100 ) { $m .= "x"; } else { $m .= "-"; } if( $n & 0040 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0020 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 02000 ) { $m .= "s"; } elsif( $n & 0010 ) { $m .= "x"; } else { $m .= "-"; } if( $n & 0004 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0002 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 01000 ) { $m .= "t"; } elsif( $n & 0001 ) { $m .= "x"; } else { $m .= "-"; } return $m; } sub ls { my ($file,$path,$mode) = @_; if (-f $file) { my @stat = stat(_); # mode, nlink, uid, gid, size, mtime, filename printf "%s %d %d %d %d %s %s\n", $mode || ft($file).fm($stat[2] & 07777), $stat[3], $stat[4], $stat[5], $stat[7], bt($stat[9]), $path; } } $DATE=bt(time()); sub list { my ($pkg, $fn, $dn, $sz, $bt); my($check,$stats,$config); chop($check = `apt-get -q check 2>/dev/null`); chop($available = `apt-cache dumpavail 2>/dev/null`); chop($stats = `apt-cache stats 2>/dev/null`); chop($config = `apt-config dump 2>&1`); $sz = length($check); print "-r--r--r-- 1 root root $sz $DATE CHECK\n"; $sz = length($available); print "-r--r--r-- 1 root root $sz $DATE AVAILABLE\n"; $sz = length($stats); print "-r--r--r-- 1 root root $sz $DATE STATS\n"; $sz = length($config); print "-r--r--r-- 1 root root $sz $DATE CONFIG\n"; $sz = length($pressupdate); print "-r-xr--r-- 1 root root $sz $DATE UPDATE\n"; $sz = length($pressupgrade); print "-r-xr--r-- 1 root root $sz $DATE UPGRADE\n"; print "-r-xr--r-- 1 root root $sz $DATE DIST-UPGRADE\n"; ls("/etc/apt/sources.list","sources.list"); ls('/etc/apt/apt.conf','apt.conf') if (-f '/etc/apt/apt.conf'); print "drwxr-xr-x 1 root root 0 $DATE all\n"; if ( open(PIPEIN, "find 
/var/cache/apt/archives -type f |") ) { while(<PIPEIN>) { chop; next if /\/lock$/; my $file = $_; s%/var/cache/apt/archives/%CACHE/%; ls($file, $_); } close PIPEIN; } my %sects = (); my %debd = (); my %deba = (); open STAT, "/var/lib/dpkg/status" or exit 1; while( <STAT> ) { chop; if( /^([\w-]*): (.*)/ ) { $pkg = $2 if( lc($1) eq 'package' ); $debd{$pkg}{lc($1)} = $2; } } close STAT; foreach $pkg (sort keys %debd) { next if $debd{$pkg}{status} =~ /not-installed/; $fn = $debd{$pkg}{package}. "_". $debd{$pkg}{version}; $dn = $debd{$pkg}{section}; if( ! $dn ) { $dn = "unknown"; } elsif( $dn =~ /^(non-us)$/i ) { $dn .= "/main"; } elsif( $dn !~ /\// ) { $dn = "main/". $dn; } unless( $sects{$dn} ) { my $sub = $dn; while( $sub =~ s!^(.*)/[^/]*$!$1! ) { unless( $sects{$sub} ) { print "drwxr-xr-x 1 root root 0 $DATE $sub/\n"; $sects{$sub} = 1; } } print "drwxr-xr-x 1 root root 0 $DATE $dn/\n"; $sects{$dn} = 1; } $sz = $debd{$pkg}{'status'} =~ /config-files/ ? 0 : $debd{$pkg}{'installed-size'} * 1024; @stat = stat("/var/lib/dpkg/info/".$debd{$pkg}{package}.".list"); $bt = bt($stat[9]); print "-rw-r--r-- 1 root root $sz $bt $dn/$fn.debd\n"; print "lrwxrwxrwx 1 root root $sz $bt all/$fn.debd -> ../$dn/$fn.debd\n"; } open STAT, "apt-cache dumpavail |" or exit 1; while( <STAT> ) { chop; if( /^([\w-]*): (.*)/ ) { $pkg = $2 if( lc($1) eq 'package' ); $deba{$pkg}{lc($1)} = $2; } } close STAT; foreach $pkg (sort keys %deba) { next if $deba{$pkg}{version} eq $debd{$pkg}{version}; $fn = $deba{$pkg}{package}. "_". $deba{$pkg}{version}; $dn = $deba{$pkg}{section}; if( ! $dn ) { $dn = "unknown"; } elsif( $dn =~ /^(non-us)$/i ) { $dn .= "/main"; } elsif( $dn !~ /\// ) { $dn = "main/". $dn; } unless( $sects{$dn} ) { my $sub = $dn; while( $sub =~ s!^(.*)/[^/]*$!$1! ) { unless( $sects{$sub} ) { print "drwxr-xr-x 1 root root 0 $DATE $sub/\n"; $sects{$sub} = 1; } } print "drwxr-xr-x 1 root root 0 $DATE $dn/\n"; $sects{$dn} = 1; } $sz = $deba{$pkg}{'status'} =~ /config-files/ ? 
0 : $deba{$pkg}{'installed-size'} * 1024; print "-rw-r--r-- 1 root root $sz $DATE $dn/$fn.deba\n"; print "lrwxrwxrwx 1 root root $sz $DATE all/$fn.deba -> ../$dn/$fn.deba\n"; } } sub copyout { my($archive,$filename) = @_; my $qarchive = quote($archive); my $qfilename = quote($filename); if( $archive eq 'CHECK' ) { system("apt-get -q check > $qfilename"); } elsif( $archive eq 'AVAILABLE' ) { system("apt-cache dumpavail > $qfilename"); } elsif( $archive eq 'STATS' ) { system("apt-cache stats > $qfilename"); } elsif( $archive eq 'CONFIG' ) { system("(apt-config dump 2>&1) > $qfilename"); } elsif( $archive eq 'UPDATE' ) { open O, ">$filename"; print O $pressupdate; close O; } elsif( $archive eq 'UPGRADE' || $archive eq 'DIST-UPGRADE' ) { open O, ">$filename"; print O $pressupgrade; close O; } elsif( $archive eq 'apt.conf' ) { system("cp /etc/apt/apt.conf $qfilename"); } elsif( $archive eq 'sources.list' ) { system("cp /etc/apt/sources.list $qfilename"); } elsif( $archive =~ /^CACHE\// ) { $archive =~ s%^CACHE/%/var/cache/apt/archives/%; system("cp $qarchive $qfilename"); } else { open O, ">$filename"; print O $archive, "\n"; close O; } } sub copyin { my($archive,$filename) = @_; my $qarchive = quote($archive); my $qfilename = quote($filename); if( $archive =~ /\.deb$/ ) { system("dpkg -i $qfilename>/dev/null"); } elsif( $archive eq 'apt.conf' ) { system("cp $qfilename /etc/apt/apt.conf"); } elsif( $archive eq 'sources.list' ) { system("cp $qfilename /etc/apt/sources.list"); } elsif( $archive =~ /^CACHE\// ) { $qarchive =~ s%^CACHE/%/var/cache/apt/archives/%; system("cp $qfilename $qarchive"); } else { die "extfs: cannot create regular file \`$archive\': Permission denied\n"; } } sub run { my($archive,$filename) = @_; if( $archive eq 'UPDATE' ) { system("apt-get update"); } elsif( $archive eq 'UPGRADE' ) { system("apt-get upgrade -u"); } elsif( $archive eq 'DIST-UPGRADE' ) { system("apt-get dist-upgrade -u"); } else { die "extfs: $archive: command not found\n"; } } sub rm { my($archive) = @_; my $qarchive = quote($archive); if( $archive =~ /^CACHE\// ) { $qarchive =~ s%^CACHE/%/var/cache/apt/archives/%; system("rm -f $qarchive"); } elsif( $archive eq 'apt.conf' ) { system("rm -f /etc/apt/apt.conf"); } elsif( $archive eq 'sources.list' ) { system("rm -f /etc/apt/sources.list"); } elsif( $archive =~ /\.debd?$/ ) { # uncommented and changed to use dpkg - alpha my $qname = $qarchive; $qname =~ s%.*/%%g; $qname =~ s%_.*%%g; system("dpkg --remove $qname >/dev/null"); die("extfs: $archive: Operation not permitted\n") if $? != 0; } else { die "extfs: $archive: Operation not permitted\n"; } } $pressupdate=<<EOInstall; WARNING Don\'t use this method if you don't want to retrieve new lists of packages. ========================================================================== This is not a real file. It is a way to retrieve new lists of packages. To update this information go back to the panel and press Enter on this file. EOInstall $pressupgrade=<<EOInstall; WARNING Don\'t use this method if you are not willing to perform an upgrade. =================================================================== This is not a real file. It is a way to perform an upgrade. To upgrade this information go back to the panel and press Enter on this file. 
EOInstall # override any locale for dates $ENV{"LC_ALL"}="C"; if ($ARGV[0] eq "list") { list(); exit(0); } elsif ($ARGV[0] eq "copyout") { copyout($ARGV[2], $ARGV[3]); exit(0); } elsif ($ARGV[0] eq "copyin") { copyin($ARGV[2], $ARGV[3]); exit(0); } elsif ($ARGV[0] eq "run") { run($ARGV[2]); exit(0); } elsif ($ARGV[0] eq "rm") { rm($ARGV[2]); exit(0); } exit(1); PK ! 砜�) ) debdnu ȯ�� #! /usr/bin/perl # # 1999 (c) Piotr Roszatycki <dexter@debian.org> # This software is under GNU license # last modification: 1999-12-08 # # debd sub quote { $_ = shift(@_); s/([^\w\/.+-])/\\$1/g; return($_); } sub bt { my ($dt) = @_; my (@time); @time = localtime($dt); $bt = sprintf "%02d-%02d-%d %02d:%02d", $time[4] + 1, $time[3], $time[5] + 1900, $time[2], $time[1]; return $bt; } sub ft { my ($f) = @_; return "d" if -d $f; return "l" if -l $f; return "p" if -p $f; return "S" if -S $f; return "b" if -b $f; return "c" if -c $f; return "-"; } sub fm { my ($n) = @_; my ($m); if( $n & 0400 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0200 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 04000 ) { $m .= "s"; } elsif( $n & 0100 ) { $m .= "x"; } else { $m .= "-"; } if( $n & 0040 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0020 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 02000 ) { $m .= "s"; } elsif( $n & 0010 ) { $m .= "x"; } else { $m .= "-"; } if( $n & 0004 ) { $m .= "r"; } else { $m .= "-"; } if( $n & 0002 ) { $m .= "w"; } else { $m .= "-"; } if( $n & 01000 ) { $m .= "t"; } elsif( $n & 0001 ) { $m .= "x"; } else { $m .= "-"; } return $m; } sub ls { my ($file) = @_; my @stat = stat($file); # mode, nlink, uid, gid, size, mtime, filename printf "%s%s %d %d %d %d %s CONTENTS%s\n", ft($file), fm($stat[2] & 07777), $stat[3], $stat[4], $stat[5], $stat[7], bt($stat[9]), $file; } sub list { my($archive)=@_; my $qarchive = quote($archive); chop($date=`LC_ALL=C date "+%m-%d-%Y %H:%M"`); chop($info_size=`dpkg -s $qarchive | wc -c`); $repack_size=length($pressrepack); $reinstall_size=length($pressreinstall); $remove_size=length($pressremove); $purge_size=length($presspurge); $reconfigure_size=length($pressreconfigure); $reinstall_size=length($pressreinstall); $select_size=length($pressselect); $unselect_size=length($pressunselect); print "dr-xr-xr-x 1 root root 0 $date CONTENTS\n"; print "dr-xr-xr-x 1 root root 0 $date DEBIAN\n"; print "-r--r--r-- 1 root root $info_size $date INFO\n"; print "-r-xr--r-- 1 root root $purge_size $date DPKG-PURGE\n"; chop($status = `dpkg -s $qarchive | grep ^Status`); if( $status =~ /deinstall/ ) { print "-r-xr--r-- 1 root root $select_size $date DPKG-SELECT\n"; } elsif( $status =~ /install/ ) { print "-r-xr--r-- 1 root root $unselect_size $date DPKG-UNSELECT\n"; } if( $status !~ /config-files/ ) { if ( -x "/usr/bin/dpkg-repack" ) { print "-r-xr--r-- 1 root root $repack_size $date DPKG-REPACK\n"; } print "-r-xr--r-- 1 root root $remove_size $date DPKG-REMOVE\n"; if ( -x "/usr/bin/apt-get" ) { print "-r-xr--r-- 1 root root $remove_size $date APT-REMOVE\n"; print "-r-xr--r-- 1 root root $reinstall_size $date APT-REINSTALL\n"; print "-r-xr--r-- 1 root root $purge_size $date APT-PURGE\n"; } } if( -x "/usr/bin/dpkg-reconfigure" && -x "/var/lib/dpkg/info/$archive.config" ) { print "-r-xr--r-- 1 root root $reconfigure_size $date DPKG-RECONFIGURE\n"; } if ( open(PIPEIN, "LC_TIME=C LANG=C ls -l /var/lib/dpkg/info/$qarchive.* |") ) { while(<PIPEIN>) { chop; next if /\.list$/; s%/var/lib/dpkg/info/$archive.%DEBIAN/%; print $_, "\n"; } close PIPEIN; } if ( open(LIST, "/var/lib/dpkg/info/$archive.list") ) { 
while(<LIST>) { chop; ls($_); } close LIST; } } sub copyout { my($archive,$filename,$destfile)=@_; my $qarchive = quote($archive); my $qfilename = quote($filename); my $qdestfile = quote($destfile); if($filename eq "INFO") { system("dpkg -s $qarchive > $qdestfile"); } elsif($filename eq "DPKG-REPACK") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $pressrepack; close FILEOUT; system("chmod a+x $qdestfile"); } } elsif($filename =~ /^DEBIAN/) { $qfilename=~s!^DEBIAN/!!; system("cat /var/lib/dpkg/info/$qarchive.$qfilename > $qdestfile"); } elsif($filename eq "DPKG-REMOVE" || $filename eq "APT-REMOVE") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $pressremove; close FILEOUT; system("chmod a+x $qdestfile"); } } elsif($filename eq "DPKG-PURGE" || $filename eq "APT-PURGE") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $presspurge; close FILEOUT; system("chmod a+x $qdestfile"); } } elsif($filename eq "DPKG-RECONFIGURE") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $pressreconfigure; close FILEOUT; system("chmod a+x $qdestfile"); } } elsif($filename eq "APT-REINSTALL") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $pressreinstall; close FILEOUT; system("chmod a+x $destfile"); } } elsif($filename eq "DPKG-SELECT") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $pressselect; close FILEOUT; system("chmod a+x $destfile"); } } elsif($filename eq "DPKG-UNSELECT") { if ( open(FILEOUT,">$destfile") ) { print FILEOUT $pressunselect; close FILEOUT; system("chmod a+x $qdestfile"); } } else { $qfilename=~s!^CONTENTS!!; system("cat $qfilename > $qdestfile"); } } sub run { my($archive,$filename)=@_; my $qarchive = quote($archive); my $qfilename = quote($filename); if($filename eq "DPKG-REMOVE") { system("dpkg --remove $qarchive"); } elsif($filename eq "APT-REMOVE") { system("apt-get remove $qarchive"); } elsif($filename eq "DPKG-PURGE") { system("dpkg --purge $qarchive"); } elsif($filename eq "APT-PURGE") { system("apt-get --purge remove $qarchive"); } elsif($filename eq "DPKG-REPACK") { system("dpkg-repack $qarchive"); } elsif($filename eq "DPKG-SELECT") { system("echo $aqrchive install | dpkg --set-selections"); } elsif($filename eq "DPKG-UNSELECT") { system("echo $qarchive deinstall | dpkg --set-selections"); } elsif($filename eq "APT-REINSTALL") { system("apt-get -u --reinstall install $qarchive"); } elsif($filename eq "DPKG-RECONFIGURE") { system("dpkg-reconfigure $qarchive"); } elsif($filename=~/^DEBIAN/) { $qfilename=~s!^DEBIAN!!; system("/var/lib/dpkg/info/$qarchive.$qfilename"); } else { $qfilename=~s!^CONTENTS!!; system($qfilename); } } $pressrepack=<<EOInstall; WARNING Don\'t use this method if you are not willing to repack this package... This is not a real file. It is a way to repack the package you are browsing. To repack this package go back to the panel and press Enter on this file. EOInstall $pressreinstall=<<EOInstall; WARNING Don\'t use this method if you are not willing to reinstall this package... This is not a real file. It is a way to reinstall the package you are browsing. To reinstall this package go back to the panel and press Enter on this file. EOInstall $pressremove=<<EOInstall; WARNING Don\'t use this method if you are not willing to remove this package... This is not a real file. It is a way to remove the package you are browsing. To remove this package go back to the panel and press Enter on this file. EOInstall $presspurge=<<EOInstall; WARNING Don\'t use this method if you are not willing to purge this package... This is not a real file. 
It is a way to purge the package you are browsing. To purge this package go back to the panel and press Enter on this file. EOInstall $pressreconfigure=<<EOInstall; WARNING Don\'t use this method if you are not willing to reconfigure this package... This is not a real file. It is a way to reconfigure the package you are browsing. To reconfigure this package go back to the panel and press Enter on this file. EOInstall $pressreinstall=<<EOInstall; WARNING Don\'t use this method if you are not willing to reinstall this package... This is not a real file. It is a way to reinstall the package you are browsing. To reinstall this package go back to the panel and press Enter on this file. EOInstall $pressselect=<<EOInstall; WARNING Don\'t use this method if you are not willing to select this package... This is not a real file. It is a way to select the package you are browsing. To select this package go back to the panel and press Enter on this file. EOInstall $pressunselect=<<EOInstall; WARNING Don\'t use this method if you are not willing to unselect this package... This is not a real file. It is a way to unselect the package you are browsing. To unselect this package go back to the panel and press Enter on this file. EOInstall umask 077; chop($name = `if [ -f "$ARGV[1]" ]; then cat $ARGV[1]; else echo $ARGV[1]; fi`); $name =~ s%.*/([0-9a-z.-]*)_.*%$1%; exit 1 unless $name; if($ARGV[0] eq "list") { &list($name); exit 0; } elsif($ARGV[0] eq "copyout") { ©out($name,$ARGV[2],$ARGV[3]); exit 0; } elsif($ARGV[0] eq "run") { &run($name,$ARGV[2]); exit 0; } exit 1; PK ! s��# # uarcnu ȯ�� #! /bin/sh # # ARC Virtual filesystem executive # Copyright (C) 2008 Jacques Pelletier # May be distributed under the terms of the GNU Public License # <jpelletier@ieee.org> # # Define which archiver you are using with appropriate options ARC_LIST=${MC_TEST_EXTFS_LIST_CMD:-"arc v"} ARC_GET="arc x" ARC_PUT="arc a" ARC_DEL="arc d" # The 'list' command executive mc_arc_fs_list() { if [ "x$UID" = "x" ]; then UID=`id -ru 2>/dev/null` if [ "x$UID" = "x" ]; then UID=0 fi fi $ARC_LIST "$1" | awk -v uid=$UID ' BEGIN { # Copied from uzoo.in. split("Jan:Feb:Mar:Apr:May:Jun:Jul:Aug:Sep:Oct:Nov:Dec", month_list, ":") for (i=1; i<=12; i++) { month[month_list[i]] = i } } /^Name/ { next } /===/ { next } /^Total/ { next } { if ($8 > 50) $8=$8 + 1900 else $8=$8 + 2000 split($9, a, ":") # convert AM/PM to 00-23 if (a[2] ~ /a$|p$/) { if (a[2] ~ /p$/) a[1] = a[1]+12 a[2]=substr(a[2],1,2) } printf "-rw-r--r-- 1 %-8d %-8d %8d %02d-%02d-%04d %02d:%02d %s\n", uid, 0, $2, month[$7], $6, $8, a[1], a[2], $1 }' 2>/dev/null exit 0 } # Command: copyout archivename storedfilename extractto mc_arc_fs_copyout() { $ARC_GET "$1" "$2" 2> /dev/null mv "$2" "$3" } # Command: copyin archivename storedfilename sourcefile mc_arc_fs_copyin() { mv "$3" "$2" $ARC_PUT "$1" "$2" 2> /dev/null } # Command: rm archivename storedfilename mc_arc_fs_rm() { $ARC_DEL "$1" "$2" 2> /dev/null } # The main routine umask 077 cmd="$1" shift case "$cmd" in list) mc_arc_fs_list "$@" ;; copyout) mc_arc_fs_copyout "$@" ;; copyin) mc_arc_fs_copyin "$@" ;; rm) mc_arc_fs_rm "$@" ;; *) exit 1 ;; esac exit 0 PK ! ���+ + uarnu ȯ�� #!/bin/sh # # Written by Alex Kuchma <ask@bcs.zp.ua> # Alex Tkachenko <alex@bcs.zp.ua> # Updated by Vitezslav Samel <xsamel00@dcse.fee.vutbr.cz> # # (C) 1997, 1998 The Free Software Foundation. 
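# A quick way to exercise this helper outside mc, with the usual extfs argument
# order; the archive and member names are illustrative:
#   ./uar list /usr/lib/libexample.a
#   ./uar copyout /usr/lib/libexample.a member.o /tmp/member.o
# copyout simply runs "ar p archive member > destination", so any archive the
# system ar can read will work.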
# # XAR=ar mcarfs_list () { # If $temp_replace string is part of the filename that part might get lost temp_replace='Unique Separator String' thisyear="`date +%Y`" $XAR tv "$1" | sed 's,^,-,;s, , 1 ,;s,/, ,' | sed -e "s/\( [0-2][0-9]\:[0-5][0-9]\)\( $thisyear \)\(.*\)/\1$temp_replace\3/" | sed -e "s/\( [0-2][0-9]\:[0-5][0-9] \)\([12][0-9][0-9][0-9] \)\(.*\)/ \2\3/" | sed -e "s/$temp_replace/ /" } mcarfs_copyout () { $XAR p "$1" "$2" > "$3" } mcarfs_copyin () { TMPDIR=`mktemp -d "${MC_TMPDIR:-/tmp}/mctmpdir-uar.XXXXXX"` || exit 1 name=`basename "$2"` (cd "$TMPDIR" && cp -fp "$3" "$name" && $XAR r "$1" "$name") rm -rf "$TMPDIR" } mcarfs_rm () { $XAR d "$1" "$2" } # override any locale for dates LC_ALL=C export LC_ALL umask 077 case "$1" in list) mcarfs_list "$2" ;; copyout) shift; mcarfs_copyout "$@" ;; copyin) shift; mcarfs_copyin "$@" ;; rm) shift; mcarfs_rm "$@" ;; mkdir|rmdir) echo "mcarfs: ar archives cannot contain directories." 1>&2 exit 1;; *) echo "mcarfs: unknown command: \"$1\"." 1>&2 exit 1;; esac exit 0 PK ! ���#� � ucabnu ȯ�� #! /bin/sh CAB=cabextract mccabfs_list () { $CAB -l "$1" | awk -v uid=`id -un` -v gid=`id -gn` ' BEGIN { flag=0 } /^-------/ { flag++; if (flag > 1) exit 0; next } { if (flag == 0) next if (length($6) == 0) next pr="-rw-r--r--" split($3, a, ".") split($4, b, ":") printf "%s 1 %s %s %d %02d/%02d/%02d %02d:%02d %s\n", pr, uid, gid, $1, a[2], a[1], a[3], b[1], b[2], $6 }' } mccabfs_copyout () { $CAB -F "$2" -p "$1" > "$3" } LC_ALL=C export LC_ALL umask 077 cmd="$1" case "$cmd" in # Workaround for a bug in mc - directories must precede files to # avoid duplicate entries, so we sort output by filenames list) mccabfs_list "$2" ;; copyout) mccabfs_copyout "$2" "$3" "$4" ;; *) exit 1 ;; esac exit 0 PK ! A��˷ � bppnu ȯ�� #! /bin/sh # # Written by Marco Ciampa 2000 # (a simple cut & paste from rpm vfs) # (C) 1996 The Free Software Foundation. # # Package of a new italian distribution: Bad Penguin # http://www.badpenguin.org/ # override any locale for dates unset LC_ALL LC_TIME=C export LC_TIME mcbppfs_list () { FILEPREF="-r--r--r-- 1 root root " FIEXPREF="-r-xr-xr-x 1 root root " DATE=`date +"%b %d %H:%M"` set x `ls -l "$1"` size=$6 echo "$FILEPREF $size $DATE CONTENTS.tar.gz" echo "$FIEXPREF 35 $DATE INSTALL" echo "$FIEXPREF 35 $DATE UPGRADE" } mcbppfs_copyout () { case "$2" in CONTENTS.tar.gz) cat "$1" > "$3"; exit 0;; INSTALL) echo "# Run this to install this package" > "$3"; exit 0;; UPGRADE) echo "# Run this to upgrade this package" > "$3"; exit 0;; esac } mcbppfs_run () { case "$2" in INSTALL) echo "Installing \"$1\""; package-setup --install "$1"; exit 0;; UPGRADE) echo "Upgrading \"$1\""; package-setup --update "$1"; exit 0;; esac } umask 077 case "$1" in list) mcbppfs_list "$2"; exit 0;; copyout) mcbppfs_copyout "$2" "$3" "$4"; exit 0;; run) mcbppfs_run "$2" "$3"; exit 1;; esac exit 1 PK ! 
��r� gitfs+nu ȯ�� #!/bin/sh LANG=C export LANG LC_TIME=C export LC_TIME umask 077 prefix='[git]' gitfs_list() { DATE=`date +"%b %d %H:%M"` GIT_DIR="$2/.git" user=`whoami` git ls-files -v -c -m -d | sort -k 2 | uniq -f 1 | while read status fname do [ "$status" = "H" ] && status=" " [ "$status" = "C" ] && status="*" echo "-r--r--r-- 1 $user 0 0 $DATE `dirname $fname`/$prefix$status`basename $fname`" done } gitfs_copyout() { printf "%s\n" "$2" > "$4" b=`echo "$prefix"| wc -c` b=`expr "$b" + 1` # remove prefix from file name echo "`dirname "$3"`/`basename "$3" | tail -c+"$b"`" >> "$4" echo "git" >> "$4" } case "$1" in list) gitfs_list "$@" ;; copyout) gitfs_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! -�� � uhanu ȯ�� #!/bin/sh # # It is the uhafs Valery Kornienkov vlk@st.simbirsk.su 2:5051/30@fidonet # ver 0.1 Thu Apr 6 12:05:08 2000 # # Tested with HA 0.999. Source of ha can be found at # ftp://ftp.ibiblio.org/pub/Linux/utils/compress/ HA=ha mchafs_list () { $HA lf "$1" 2>/dev/null | awk -v uid=$(id -ru) ' /^===========/ {next} { if ($5="%" && $8~/DIR|ASC|HSC|CPY/) { split($6, a, "-") split($7, t, ":") filename=$1 filesize=$2 getline if ($2=="(none)") $2="" path=$2 getline if ($1~/^d.*/) next printf "%s %s %-8d %-8d %8d %s-%s-%s %s:%s %s%s\n",\ $1,1,0,0,filesize,a[3],a[2],a[1],t[1],t[2],path,filename } }' } mchafs_copyout () { TMPDIR=`mktemp -d "${MC_TMPDIR:-/tmp}/mctmpdir-uha.XXXXXX"` || exit 1 cd "$TMPDIR" $HA xyq "$1" "$2" >/dev/null cat "$2" > "$3" cd / rm -rf "$TMPDIR" } cmd="$1" shift case "$cmd" in list) mchafs_list "$@" ;; copyout) mchafs_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! ��-(@ @ uzoonu ȯ�� #! /bin/sh # # Zoo file system # # Source of zoo can be found at # ftp://ftp.ibiblio.org/pub/Linux/utils/compress/ ZOO=${MC_TEST_EXTFS_LIST_CMD:-zoo} # Stupid zoo won't work if the archive name has no .zoo extension, so we # have to make a symlink with a "better" name. Also, zoo can create # directories even if printing files to stdout, so it's safer to confine # it to a temporary directory. mklink () { TMPDIR=`mktemp -d ${MC_TMPDIR:-/tmp}/mctmpdir-uzoo.XXXXXX` || exit 1 trap 'cd /; rm -rf "$TMPDIR"' 0 1 2 3 5 13 15 ARCHIVE="$TMPDIR/tmp.zoo" ln -sf "$1" "$ARCHIVE" cd "$TMPDIR" || exit 1 } mczoofs_list () { mklink "$1" $ZOO lq "$ARCHIVE" | awk -v uid=$(id -ru) ' /^[^\ ]/ { next } { if (NF < 8) next if ($8 ~ /^\^/) $8=substr($8, 2) if ($6 > 50) $6=$6 + 1900 else $6=$6 + 2000 split($7, a, ":") split("Jan:Feb:Mar:Apr:May:Jun:Jul:Aug:Sep:Oct:Nov:Dec", month_list, ":") for (i=1; i<=12; i++) { month[month_list[i]] = i } if ($8 ~ /\/$/) printf "drwxr-xr-x 1 %-8d %-8d %8d %02d-%02d-%4d %02d:%02d %s\n", uid, 0, $1, month[$5], $4, $6, a[1], a[2], $8 else printf "-rw-r--r-- 1 %-8d %-8d %8d %02d-%02d-%4d %02d:%02d %s\n", uid, 0, $1, month[$5], $4, $6, a[1], a[2], $8 }' 2>/dev/null exit 0 } mczoofs_copyout () { mklink "$1" # zoo only accepts name without directory as file to extract base=`echo "$2" | sed 's,.*/,,'` $ZOO xpq: "$ARCHIVE" "$base" > "$3" cd / exit 0 } umask 077 cmd="$1" shift case "$cmd" in list) mczoofs_list "$@" ;; copyout) mczoofs_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! �j�� debanu ȯ�� #! 
/usr/bin/perl # # 1999 (c) Piotr Roszatycki <dexter@debian.org> # This software is under GNU license # last modification: 1999-12-08 # # deba sub quote { $_ = shift(@_); s/([^\w\/.+-])/\\$1/g; return($_); } sub list { my($qarchive)=@_; $qarchive = quote($qarchive); chop($date=`LC_ALL=C date "+%m-%d-%Y %H:%M"`); chop($info_size=`apt-cache show $qarchive | wc -c`); $install_size=length($pressinstall); $upgrade_size=length($pressupgrade); print "-r--r--r-- 1 root root $info_size $date INFO\n"; chop($debd = `dpkg -s $qarchive | grep -i ^Version | sed 's/^version: //i'`); chop($deba = `apt-cache show $qarchive | grep -i ^Version | sed 's/^version: //i'`); if( ! $debd ) { print "-r-xr--r-- 1 root root $install_size $date INSTALL\n"; } elsif( $debd ne $deba ) { print "-r-xr--r-- 1 root root $upgrade_size $date UPGRADE\n"; } } sub copyout { my($archive,$filename,$destfile)=@_; my $qarchive = quote($archive); my $qdestfile = quote($destfile); if($filename eq "INFO") { system("apt-cache show $qarchive > $qdestfile"); } elsif($filename eq "INSTALL") { if ( open(FILEOUT, "> $destfile") ) { print FILEOUT $pressinstall; close FILEOUT; system("chmod a+x $qdestfile"); } } elsif($filename eq "UPGRADE") { if ( open(FILEOUT, ">, $destfile") ) { print FILEOUT $pressupgrade; close FILEOUT; system("chmod a+x $qdestfile"); } } else { die "extfs: $filename: No such file or directory\n"; } } sub run { my($archive,$filename)=@_; my $qarchive = quote($archive); if($filename eq "INSTALL") { system("apt-get install $qarchive"); } elsif($filename eq "UPGRADE") { system("apt-get install $qarchive"); } else { die "extfs: $filename: Permission denied\n"; } } $pressinstall=<<EOInstall; WARNING Don\'t use this method if you are not willing to install this package... This is not a real file. It is a way to install the package you are browsing. To install this package go back to the panel and press Enter on this file. EOInstall $pressupgrade=<<EOInstall; WARNING Don\'t use this method if you are not willing to upgrade this package... This is not a real file. It is a way to upgrade the package you are browsing. To upgrade this package go back to the panel and press Enter on this file. EOInstall umask 077; chop($name = `if [ -f "$ARGV[1]" ]; then cat $ARGV[1]; else echo $ARGV[1]; fi`); $name =~ s%.*/([0-9a-z.-]*)_.*%$1%; exit 1 unless $name; if($ARGV[0] eq "list") { &list($name); exit 0; } elsif($ARGV[0] eq "copyout") { ©out($name,$ARGV[2],$ARGV[3]); exit 0; } elsif($ARGV[0] eq "run") { &run($name,$ARGV[2]); exit 0; } exit 1; PK ! ��}�v v uarjnu ȯ�� #! /bin/sh # # Written by Viatcheslav Odintsov (2:5020/181) # (C) 2002 ARJ Software Russia. # # This is an updated parser for ARJ archives in Midnight Commander. You need # full ARJ rather than UNARJ. 
Open-source ARJ v 3.10 for Unix platforms can # be obtained here: # # - http://www.sourceforge.net/projects/arj/ # - http://arj.sourceforge.net/ ARJ="arj -+ -ja1" mcarjfs_list () { $ARJ v "$1" | awk -v uuid=$(id -ru) ' { if (($0 ~ /^[0-9]+\) .*/)||($0 ~ /^------------ ---------- ---------- -----/)){ if (filestr ~ /^[0-9]+\) .*/) { printf "%s 1 %-8d %-8d %8d %02d-%02d-%02d %02d:%02d %s%s\n", perm, uid, gid, size, date[2], date[3], date[1], time[1], time[2], file, symfile perm="" file="" symfile="" filestr="" } } if ($0 ~ /^[0-9]+\) .*/) { filestr=$0 sub(/^[0-9]*\) /, "") file=$0 uid=uuid gid=0 } if ($0 ~ /^.* [0-9]+[\t ]+[0-9]+ [0-9]\.[0-9][0-9][0-9] [0-9][0-9]-[0-9][0-9]-[0-9][0-9] [0-9][0-9]:[0-9][0-9]:[0-9][0-9].*/) { size=$3 split($6, date, "-") split($7, time, ":") if ($8 ~ /^[rwx-]/) {perm=$8;} else {perm="-rw-r--r--"} } if ($0 ~ /^[\t ]+SymLink -> .*/) { symfile = " -> "$3 perm="l"substr(perm, 2) } if ($0 ~ /^[\t ]+Owner: UID [0-9]+\, GID [0-9]+/) { uid=$3 gid=$5 owner=1 } }' } mcarjfs_copyout () { $ARJ e -y "$1" "$2" -jw"$3" >/dev/null 2>/dev/null } umask 077 cmd="$1" shift case "$cmd" in list) mcarjfs_list "$@" ;; copyout) mcarjfs_copyout "$@" ;; *) exit 1 ;; esac exit 0 PK ! <+�� ulibnu ȯ�� #! /usr/bin/perl -w # # VFS to manage the gputils archives. # Written by Molnár Károly (proton7@freemail.hu) 2012 # my %month = ('jan' => '01', 'feb' => '02', 'mar' => '03', 'apr' => '04', 'may' => '05', 'jun' => '06', 'jul' => '07', 'aug' => '08', 'sep' => '09', 'oct' => '10', 'nov' => '11', 'dec' => '12'); my @PATHS = ('/usr/bin/gplib', '/usr/local/bin/gplib'); my $gplib = ''; foreach my $i (@PATHS) { if (-x $i) { $gplib = $i; last; } } if ($gplib eq '') { print STDERR "\a\t$0 : Gplib not found!\n"; exit(1); } my $cmd = shift; my $archive = shift; #------------------------------------------------------------------------------- sub mc_ulib_fs_list { open(PIPE, "$gplib -tq $archive |") || die("Error in $gplib -tq"); my($dev, $inode, $mode, $nlink, $uid, $gid) = stat($archive); while (<PIPE>) { chomp; my @w = split(/\s+/o); my $fname = $w[0]; $fname =~ s|\\|/|g; printf("-rw-r--r-- 1 %s %s %d %s-%02u-%s %s %s\n", $uid, $gid, int($w[1]), $month{lc($w[4])}, $w[5], $w[7], substr($w[6], 0, 5), $fname); } close (PIPE); } #------------------------------------------------------------------------------- sub mc_ulib_fs_copyin { system("$gplib -r $archive $_[0]"); my $ret = $?; if ($ret) { die("Error in: $gplib -r"); } } #------------------------------------------------------------------------------- sub mc_ulib_fs_copyout { my($module, $fname) = @_; my $tmpdir = $ENV{'TMPDIR'}; $tmpdir = '/tmp' if (! 
defined $tmpdir or $tmpdir eq '');
    open(PIPE, "$gplib -tq $archive |") || die("Error in: $gplib -tq");
    while (<PIPE>) {
        chomp;
        my @w = split(/\s+/o);
        my $module_orig = $w[0];
        my $count = () = ($module_orig =~ /(\\)/g);
        my $md = $module_orig;
        $md =~ s|\\|/|g;
        if ($module eq $md) {
            return if ($count);
        }
    }
    close (PIPE);
    chdir($tmpdir);
    system("$gplib -x $archive $module");
    my $ret = $?;
    if ($ret) {
        die("Error in: $gplib -x");
    }
    rename($module, $fname) || die("Error in: rename($module, $fname)");
}
#-------------------------------------------------------------------------------
sub mc_ulib_fs_rm {
    system("$gplib -d $archive $_[0]");
    my $ret = $?;
    if ($ret) {
        die("Error in: $gplib -d");
    }
}
################################################################################
if ($cmd eq 'list') { mc_ulib_fs_list(@ARGV); }
elsif ($cmd eq 'copyin') { mc_ulib_fs_copyin(@ARGV); }
elsif ($cmd eq 'copyout') { mc_ulib_fs_copyout(@ARGV); }
elsif ($cmd eq 'rm') { mc_ulib_fs_rm(@ARGV); }
else { exit(1); }

[ZIP central directory, entry names only: uwim, rpm, changesetfs, patchfs, README, patchsetfs, uace, u7z, mailfs, ualz, uzip, rpms+, README.extfs, uc1541, iso9660, urar, hp48+, s3+, ulha, audio, unar, dpkg+, deb, a+, trpm, lslR, apt+, debd, uarc, uar, ucab, bpp, gitfs+, uha, uzoo, deba, uarj, ulib]
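All of the helpers in this archive share the calling convention visible in their trailing dispatch blocks: the first argument is the extfs command (list, copyout, copyin, run, rm, mkdir or rmdir, depending on what the script supports), the second is the archive, device or package entry, and any remaining arguments name the stored file and the local source or target. A minimal sketch of driving one of them by hand for debugging, assuming a helper named uzoo and an archive demo.zoo present in the current directory:

    # list the archive in the ls -l style lines mc expects
    sh ./uzoo list demo.zoo
    # copy one stored member out to a temporary file
    sh ./uzoo copyout demo.zoo docs/readme.txt /tmp/readme.txt

Each list function pipes its native archiver output through awk precisely so that these lines match the format described in the extfs README (permissions, link count, owner, group, size, date and name); when a helper misbehaves, comparing its raw list output against that format is usually the quickest diagnosis.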