Comment atom

This commit is contained in:
Vincent Riquer 2026-02-20 04:07:38 +01:00
parent 756ce7ec01
commit d680d52425

136
atom
View File

@ -13,6 +13,9 @@
# A copy of the GNU General Public License v3 is included in the LICENSE file
# at the root of the project.
# Directories for various required data. Set by the configure script and the
# Makefile.
# Also save $IFS just in case.
declare -r \
DOCDIR=%DOCDIR% \
LIBDIR=%LIBDIR% \
@ -26,7 +29,7 @@ declare -r \
## Define exit codes
source "$SHAREDIR"/errorcodes
# config structures
# Config structures.
declare -A \
destinationenabled \
destinationascii \
@ -51,10 +54,13 @@ declare -A \
exit $EBASHVERS
}
# Locales break parsing.
LC_ALL=C
# Enable extended globbing for some filename manipulations.
shopt -s extglob
# Array of ID3v1 genres, number to name mapping.
source "$SHAREDIR"/id3genres
for function in "$LIBDIR"/*/*
@ -62,6 +68,7 @@ do
source "$function"
done
# Migrate old config to XDG where required
if ! [[ -f "${XDG_CONFIG_HOME:-$HOME/.config}/AtOM/atom.cfg" ]] \
&& [[ -f "$HOME/.atom/atom.cfg" ]]
then
@ -141,6 +148,9 @@ do
done
askconf() {
# Prompt user to interactively create a config if it doesn't exist and
# we're not in cron mode.
# Called recursively until a valid answer is given.
if (( cron ))
then
echo 'Non-interactive, not running setup. Please run atom -S.' >&2
@ -165,6 +175,7 @@ askconf() {
esac
}
# Read config if it exists, call askconf otherwise.
if [ ! -f "$cffile" ]
then
if [ ! -d "${cffile%/*}" ]
@ -176,14 +187,17 @@ then
fi
getConfig
# If user wants to change config, run setup.
(( forcesetup )) && setup
# Deactivate `!` history expansion, just in case.
set +H
# Apply CLI overrides
[ -n "$cliload" ] && maxload=$cliload
[ -n "$cliltimer" ] && loadinterval=$cliltimer
# Print config if requested or in debug mode. Exit if only dumping config.
(( debug || cfgdump )) && printConfig
(( cfgdump )) && exit
@ -191,6 +205,11 @@ set +H
sanityCheck
openDatabase
# create missing destination directories and DB entries, get destination IDs
# for later use
createDestinations
# Apply destinations "enabled" status in DB with respect to config.
for destination in "${destinations[@]}"
do
if (( ${destinationenabled["$destination"]} ))
@ -201,14 +220,20 @@ do
fi
done
createDestinations
# get source files. Update DB.
getFiles
# Scan mime-types (for new/changed files). Update DB
updateMimes
# Remove source files that are gone from DB. (`last_seen` column).
# FOREIGN KEY `source_file_id` on table `destination_files` gets set to NULL
# by `ON DELETE` parameter. We can use that to find destination files that need
# to be removed.
removeObsoleteFiles
# remove destination files for which the source file is gone. (`source_file_id`
# NULL -- `rm` them if they exist, remove the DB entry in any case)
(( cron )) || echo -n 'Gathering files for cleaning...'
echo '
SELECT COUNT(id)
@ -216,6 +241,7 @@ echo '
WHERE source_file_id is NULL;' >&3
read -u4 -r -d $'\0' removecount
# Gather in 500 files batches to avoid pipe overflow.
until (( ${#removefile[@]} == removecount ))
do
echo '
@ -247,6 +273,10 @@ done
unset deleted
unset removed
echo 'BEGIN TRANSACTION;' >&3
# Remove the files if they exist. Unconditionnally remove from DB.
# Run in transaction to speed up the process, COMMIT every 1000th file in case
# process gets killed.
for id in ${!removefile[@]}
do
filename=${removefile[id]}
@ -274,8 +304,14 @@ echo -n "${deleted+$deleted files deleted${removed:+, }}${removed:+$removed remo
(( deleted || removed )) && echo
unset removecount deleted removed removefile
# Update tags for new/changed files and updated tag parsers. Update DB.
# Uses `tags.last_change` vs `source_files.last_change`
# + `tags.tagreader` vs tagreader versions declared in running version's
# source code.
updateTags
# Reset timestamps for files in destinations that were requested to be fully
# rebuilt.
for forcedest in "${forceall[@]}"
do
if forcedestid=$(Select destinations id <<<"name = $forcedest")
@ -290,6 +326,18 @@ do
fi
done
# Create TEMPORARY (in-memory) tables for tasks. Up to 60 arguments per task
# (including command).
#
# `requires` designates the id of a tasks that must be completed before this
# one can be run.
# `required_by` is a counter of tasks that require this one. Used for temp
# files cleanup.
# `status` is 0 for pending tasks, 2 for failed tasks, 4 for completed tasks
# depended upon (temp file should be left intact).
#
# TRIGGER `fail_depends` sets status of all tasks that depend on a failed task
# to 2
echo '
CREATE TEMPORARY TABLE tasks(
id INTEGER PRIMARY KEY,
@ -383,6 +431,7 @@ echo '
END;
' >&3
# Get number of files to process. Apply `maxbatch` limit if specified.
echo '
SELECT COUNT(source_files.id)
FROM source_files
@ -406,6 +455,8 @@ then
(( togo = filecount - maxbatch ))
filecount=$maxbatch
fi
# Get files to process. Apply `maxbatch` limit if specified.
echo '
SELECT
source_files.id,
@ -452,16 +503,26 @@ echo ';
read -u4 -r -d $'\0' line
while ! [[ $line = AtOM:NoMoreFiles ]]
do
# Append `::AtOM:SQL:Sep::` at the end of the line to make sure we can
# parse empty fields.
decodefiles+=("$line::AtOM:SQL:Sep::")
read -u4 -r -d $'\0' line
done
(( cron )) || echo -n $'Creating tasks...\033[K'
# Spawn perl coprocess for unicode to ascii conversion if needed.
(( textunidecodeneeded )) && ascii
# Generate tasks for each file. Tasks that depend on other tasks (e.g. encoding
# depends on decoding) get the ID of the task they depend on in `requires`
# column. This is used to make sure that encoding doesn't start before decoding
# and other transforms have completed.
echo 'BEGIN TRANSACTION;' >&3
for line in "${decodefiles[@]}"
do
# Parsing SQL output is fun. We use `::AtOM:SQL:Sep::` as separator
# between fields at the end of the line to make sure we can parse empty
# fields.
fileid=${line%%::AtOM:SQL:Sep::*}
rest=${line#*::AtOM:SQL:Sep::}
filename=${rest%%::AtOM:SQL:Sep::*}
@ -506,14 +567,24 @@ do
rest=${rest#*::AtOM:SQL:Sep::}
year=${rest%%::AtOM:SQL:Sep::*}
unset rest
# Skip destinations with formats for which tools are missing.
case ${destinationformat["$destination"]} in
vorbis) (( disableoggenc )) && continue ;;
opus) (( disableopusenc )) && continue ;;
mp3) (( disablelame )) && continue ;;
esac
# Create decoding task depending on mimetype.
decodeFile
# Build target directory path from source file path OR from rename
# pattern if set.
getDestDir
# Same for filename.
getDestFile
# Set copied to 1 for files with extension in `copy_extension`.
for copy_ext in "${destinationcopyext[@]}"
do
if [[ $filename =~ '.*\.'"$copy_ext"'$' ]]
@ -524,10 +595,17 @@ do
done
if (( copied ))
then
# Copy file as-is to destination.
copyFiles_matching
else
# Call suitable function to create encoding task depending on
# destination format.
# encodeFile::mp3
# encodeFile::opus
# encodeFile::vorbis
encodeFile::${destinationformat[$destination]}
fi
# Cleanup variables. Avoids leaking data between iterations.
unset \
album \
albumartist \
@ -570,6 +648,13 @@ echo 'COMMIT;' >&3
# remove perl unicode to ascii coprocess
(( textunidecodeneeded )) && eval exec "${toascii[1]}>&-"
# Main loop. Run up to `concurrency` tasks in parallel, depending on system
# load. Spawn new tasks as old ones complete. Update progress info and commit
# DB every minute.
#
# Start with concurrency = maxload / 2, which seems like a good starting point
# on most systems. If result is 0, force to 1 to make sure we get going. If
# `-f <workers>` option was used, use that value instead and don't change it.
concurrency=$(( maxload / 2 ))
(( concurrency )) || concurrency=1
active=0
@ -590,6 +675,13 @@ do
fi
read humanload garbage < /proc/loadavg
load=${humanload%.*}
# If `-f <workers>` option was used, keep concurrency fixed to that
# value. Otherwise, adjust concurrency according to load and `maxload`
# value. If load is above `maxload`, reduce concurrency by 1 (down to 0
# if `allow_zero_running` is set). If load is below `maxload`, increase
# concurrency by 1 (only if all slots are populated).
# Don't update concurrency more often than every `load-interval` seconds
# to reduce hysteresis.
if (( fixed_workers ))
then
concurrency="$fixed_workers"
@ -609,9 +701,30 @@ do
fi
fi
fi
# check if workers have finished.
# If a worker finished with non-zero exit code, mark the task as failed
# in DB. TRIGGER `fail_depends` will fail all tasks that depend on it.
checkworkers
# If task failed, set status to 2, remove temp files if any and
# increment `failed` counter. Don't delete task from DB to keep track
# of failed tasks for final report.
# If task was successful, update destination_files.last_change to
# source_files.last_change, update old_filename to previous
# (destination file) filename, store rename_pattern, fat32compat and
# ascii settings.
# Set status to 4 for tasks that were depended upon by other tasks to
# avoid cleaning up temp files before all tasks depending on them have
# completed, delete task otherwise.
cleaner
# Look for pending tasks that can be started (required task's status
# is 4 or `requires` is NULL) and start them.
# Trigger a dump of the tasks table if it's inconsistent (no running
# tasks, no ready tasks, but pending tasks exist) and exit with error
# $ETASKLEFT.
master
# "Fancy" progress info. Only calculate if at least one task has
# succeeded to avoid division by zero.
if (( ran - failed ))
then
currenttime=$timestamp
@ -650,6 +763,7 @@ do
fmtworkers='W:%i/%i'
fmtprogress="T:%${#taskcount}i/%i (F:%i) %3i%%"
fmttime='%2id %2ih%02im%02is (A:%4.1fs/task)'
# Abuse timeformatting to get ETA.
eta="ETA:$(
printf "%(%c)T" "$(( currenttime + secsremaining ))"
)"
@ -668,6 +782,7 @@ do
${minutes:-0} \
${seconds:-0} \
${avgdsec:-0}.${avgdmsec:-0}
# If 0 concurrency is allowed, show paused status when concurrency is 0
if ! (( concurrency )) && ! (( cron ))
then
if (( active ))
@ -681,6 +796,7 @@ done
echo 'COMMIT;' >&3
unset count
# Final report. Calculate elapsed time and format it in human readable way.
endtime=$EPOCHSECONDS
(( elapsedseconds = endtime - starttime ))
@ -715,6 +831,9 @@ endtime=$EPOCHSECONDS
(( cron )) || echo -en "\033[K"
(( ran )) && echo
# If some tasks failed, print them. Don't print failed tasks that did not run
# to avoid confusing tasks marked as failed because they depended on ones that
# failed.
if (( failed ))
then
echo $'\nFailed tasks:\n'
@ -801,6 +920,8 @@ then
done
fi
# Check if there are files that need to be renamed because their rename pattern
# changed.
for destination in "${!destinationpath[@]}"
do
echo '
@ -860,6 +981,8 @@ do
'vorbis') extension=ogg ;;
esac
(( cron )) || echo -en "$destination: rename pattern changed, renaming files...\033[K"
# Spawn perl coprocess for unicode to ascii conversion if
# needed.
(( textunidecodeneeded )) && ascii
echo 'BEGIN TRANSACTION;' >&3
for line in "${renamefiles[@]}"
@ -935,8 +1058,12 @@ do
unset count changedcount renamefiles
done
# Copy files of mime-types matching `copy_mime-type`
copyFiles_action
# Remove files obsoleted by `rename_pattern`, `ascii` or `fat32compat` changes.
# Based on `destination_files.old_filename` field, populated upon task
# completion.
echo '
SELECT destination_files.id,
destination_files.filename,
@ -983,6 +1110,9 @@ echo 'COMMIT;' >&3
(( cron )) || echo -en "\033[K"
(( count )) && echo
# Remove empty directories in destinations.
# We blindly duplicate the source tree in the destination, so we may end up
# with empty directories.
(( debug )) && echo "Purging empty directories..."
for path in "${destinationpath[@]}"
do