#!/bin/sh

# eget - a simple shell wrapper over wget for downloading directories over http (wget does not support wildcards for http)
# Usage:
# eget http://ftp.altlinux.ru/pub/security/ssl/*
#
# Copyright (C) 2014-2014, 2016, 2020, 2022, 2025  Etersoft
# Copyright (C) 2014 Daniil Mikhailov <danil@etersoft.ru>
# Copyright (C) 2016-2017, 2020, 2022, 2025 Vitaly Lipatov <lav@etersoft.ru>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

init_eget()
{
PROGDIR=$(dirname "$0")
PROGNAME=$(basename "$0")
CMDSHELL="/bin/sh"
[ "$PROGDIR" = "." ] && PROGDIR="$(pwd)"
if [ "$0" = "/dev/stdin" ] || [ "$0" = "sh" ] ; then
    PROGDIR=""
    PROGNAME=""
fi
}
init_eget


fatal()
{
    echo "FATAL: $*" >&2
    exit 1
}

info()
{
    [ -n "$quiet" ] && return
    echo "$*" >&2
}

eget()
{
	if [ -n "$EPMMODE" ] ; then
		# if embedded in epm
		(unset EGET_IPFS_GATEWAY; unset EGET_IPFS_API ; unset EGET_IPFS_DB ; EGET_BACKEND=$ORIG_EGET_BACKEND internal_tools_eget "$@" )
		return
	fi

	[ -n "$PROGNAME" ] || fatal "pipe mode is not supported"

	local bashopt=''
	#[ -n "$verbose" ] && bashopt='-x'

	(unset EGET_IPFS_GATEWAY; unset EGET_IPFS_API ; unset EGET_IPFS_DB ; EGET_BACKEND=$ORIG_EGET_BACKEND $CMDSHELL $bashopt $PROGDIR/$PROGNAME "$@" )
}

# TODO:
arch="$(uname -m)"

# copied from eepm project

# copied from /etc/init.d/outformat (ALT Linux)
isatty()
{
	# Set a sane TERM required for tput
	[ -n "$TERM" ] || TERM=dumb
	export TERM
	test -t 1
}

isatty2()
{
	# check stderr
	test -t 2
}


check_tty()
{
	isatty || return
	is_command tput >/dev/null 2>/dev/null || return
	# FreeBSD does not support tput -S
	echo | a= tput -S >/dev/null 2>/dev/null || return
	export USETTY="tput -S"
}

: ${BLACK:=0} ${RED:=1} ${GREEN:=2} ${YELLOW:=3} ${BLUE:=4} ${MAGENTA:=5} ${CYAN:=6} ${WHITE:=7}

set_boldcolor()
{
	[ -n "$USETTY" ] || return
	{
		echo bold
		echo setaf $1
	} | $USETTY
}

set_color()
{
	[ -n "$USETTY" ] || return
	{
		echo setaf $1
	} | $USETTY
}

restore_color()
{
	[ -n "$USETTY" ] || return
	{
		echo op; # set Original color Pair.
		echo sgr0; # turn off all special graphics mode (bold in our case).
	} | $USETTY
}


echover()
{
    [ -n "$verbose" ] || return
    echo "$*" >&2
}

# Print the command line (colorized, to stderr)
showcmd()
{
	if [ -z "$quiet" ] ; then
		set_boldcolor $GREEN
		local PROMPTSIG="\$"
		[ "$UID" = 0 ] && PROMPTSIG="#"
		echo " $PROMPTSIG $@"
		restore_color
	fi >&2
}

# Print the command line, then run it
docmd()
{
	showcmd "$@"
	"$@"
}

verdocmd()
{
	[ -n "$verbose" ] && showcmd "$@"
	"$@"
}


# copied from epm
# print a path to the command if it exists in $PATH
if a= which which 2>/dev/null >/dev/null ; then
    # the best case is if we have the which command (other ways need checking)
    # TODO: don't use which at all, it is a binary, not a shell builtin
print_command_path()
{
    a= which -- "$1" 2>/dev/null
}
elif a= type -a type 2>/dev/null >/dev/null ; then
print_command_path()
{
    a= type -fpP -- "$1" 2>/dev/null
}
else
print_command_path()
{
    a= type "$1" 2>/dev/null | sed -e 's|.* /|/|'
}
fi

# check if <arg> is a real command
is_command()
{
    print_command_path "$1" >/dev/null
}

# add realpath if missing
if ! is_command realpath ; then
realpath()
{
    [ -n "$*" ] || return
    readlink -f "$@"
}
fi

# check that the argument is a numeric value (used by the option parsing below;
# the timeout options accept integer or decimal seconds)
is_numeric()
{
    echo "$1" | grep -qE '^[0-9]+([.][0-9]+)?$'
}


# translate a shell glob to a regexp and filter stdin by it (see man 7 glob)
filter_glob()
{
	[ -z "$1" ] && cat && return
	# translate glob to regexp
	grep "$(echo "$1" | sed -e 's|\.|\\.|g' -e 's|\*|.*|g' -e 's|\?|.|g' )$"
}
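
# Example (illustrative): the mask "na*.log" is translated by the sed above
# into the regexp "na.*\.log$", so
#   echo "na1.log" | filter_glob "na*.log"      # prints na1.log
#   echo "other.txt" | filter_glob "na*.log"    # prints nothing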

filter_order()
{
    if [ -n "$SECONDLATEST" ] ; then
        sort -V | tail -n2 | head -n1
        return
    fi
    [ -z "$LATEST" ] && cat && return
    sort -V | tail -n1
}
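
# Example (illustrative): with --latest set, input such as
#   tsetup.4.2.4.tar.xz
#   tsetup.4.9.9.tar.xz
# is reduced by "sort -V | tail -n1" to tsetup.4.9.9.tar.xz;
# --second-latest would print tsetup.4.2.4.tar.xz instead.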

have_end_slash_or_php_parametr()
{
    echo "$1" | grep -qE '(/$|\.php($|\?))'
}

is_abs_path()
{
    echo "$1" | grep -q '^/'
}

is_fileurl()
{
    is_abs_path "$1" && return
    echo "$1" | grep -q "^file:/"
}

path_from_url()
{
    echo "$1" | sed -e 's|^file://*|/|'
}

is_url()
{
    # any supported scheme: file:/ ftp:// http(s):// ipfs:// rsync://
    echo "$1" | grep -qE "^(file|ftp|https?|ipfs|rsync):/"
}

is_strange_url()
{
    local URL="$1"
    is_url "$URL" || return
    #echo "$URL" | grep -q -E "\.(deb|rpm|zip)\?" && return 1
    echo "$URL" | grep -q "[?&]"
}

is_ipfs_hash()
{
    # If a CID is 46 characters starting with "Qm", it's a CIDv0
    echo "$1" | grep -q -E "^Qm[[:alnum:]]{44}$" && return
    # TODO: CIDv1 support, see https://github.com/multiformats/cid
    return 1
}
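
# Example: the CIDv0 used for the gateway checks below,
# QmYwf2GAMvHxfFiUFL2Mr6KUG6QrDiupqGc8ms785ktaYw, is 46 characters long and
# starts with "Qm", so is_ipfs_hash accepts it; an ipfs://... URL does not match.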

is_ipfsurl()
{
    is_ipfs_hash "$1" && return
    echo "$1" | grep -q "^ipfs://"
}

is_httpurl()
{
    # TODO: improve
    echo "$1" | grep -q "^https://" && return
    echo "$1" | grep -q "^http://" && return
}

is_ftpurl()
{
    echo "$1" | grep -q "^ftp://"
}

is_rsyncurl()
{
    echo "$1" | grep -q "^rsync://"
}

# SSH/rsync URL: user@host:/path or host:/path (but not scheme://)
is_sshurl()
{
    # Match host:path or host:/path, but not scheme://
    echo "$1" | grep -qE '^[^/:]+:' && ! echo "$1" | grep -q "://"
}
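
# Examples (illustrative): "user@host:/srv/dir/" and "host:backup" match
# (single colon, no "://"), while "https://host/path" and "/local/path" do not.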

cid_from_url()
{
    echo "$1" | sed -e 's|^ipfs://*||' -e 's|\?.*||'
}


# args: cmd <URL> <options>
# will run cmd <options> <URL>
# If CHECKMIRRORS is set and EGET_MIRRORS contains mirror hosts,
# will try each mirror on failure
download_with_mirroring()
{
    local CMD="$1"
    shift
    local URL="$1"
    shift

    local res
    $CMD "$@" "$URL" && return
    res=$?
    [ -n "$CHECKMIRRORS" ] || return $res

    # Default mirrors if EGET_MIRRORS is not set
    local mirrors="${EGET_MIRRORS:-https://mirror.eterfund.ru https://mirror.eterfund.org}"
    local MIRROR SECONDURL
    for MIRROR in $mirrors ; do
        # Replace protocol:// with mirror/, preserving original host as path component
        SECONDURL="$(echo "$URL" | sed -e "s|^[a-z]*://|$MIRROR/|")"
        $CMD "$@" "$SECONDURL" && return
    done
    return $res
}
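
# Example of the rewrite above (illustrative URL): a failed download of
#   https://example.com/pub/security/ssl/file.tar
# is retried via the default mirrors as
#   https://mirror.eterfund.ru/example.com/pub/security/ssl/file.tar
# (the scheme is replaced, the original host becomes a path component).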



check_tty

quiet=''
verbose=''
WGETNOSSLCHECK=''
CURLNOSSLCHECK=''
AXELNOSSLCHECK=''
USERAGENT=''
WGETHEADER=''
CURLHEADER=''
AXELHEADER=''
WGETCOMPRESSED=''
CURLCOMPRESSED=''
AXELCOMPRESSED=''
WGETQ='' #-q
CURLQ='' #-s
AXELQ='' #-q
ARIA2Q=''
# TODO: 
WGETNAMEOPTIONS='--content-disposition'
CURLFILENAMEOPTIONS='--remote-name --remote-time --remote-header-name'
CURLNAMEOPTIONS='--remote-time --remote-header-name'
AXELNAMEOPTIONS=''
WGETRUSTSERVERNAMES=''
CURLTRUSTSERVERNAMES=''

USEOUTPUTDIR=''
WGETNODIRECTORIES=''
CONTINUE=''
FORCEOVERWRITE=''
WGETTIMEOUT=''
CURLMAXTIME=''
WGETREADTIMEOUT=''
WGETRETRYCONNREFUSED=''
CURLRETRYCONNREFUSED=''
WGETTRIES='--tries 1'
CURLRETRY=''
WGETLOADCOOKIES=''
CURLCOOKIE=''

NOGLOB=''
LISTONLY=''
CHECKURL=''
CHECKSITE=''
GETRESPONSE=''
GETFILENAME=''
GETREALURL=''
GETIPFSCID=''
LATEST=''
SECONDLATEST=''
CHECKMIRRORS=''
TARGETFILE=''
FORCEIPV=''

WGETSHOWPROGRESS=''
CURLSHOWPROGRESS=''
ARIA2SHOWPROGRESS=''
AXELSHOWPROGRESS=''

TIMESTAMPING=''
INPUTFILE=''

set_quiet()
{
    WGETQ='-q'
    CURLQ='-s'
    AXELQ='--quiet'
    ARIA2Q=''
    quiet=1
}

unset_quiet()
{
    WGETQ=''
    CURLQ=''
    AXELQ=''
    ARIA2Q=''
    quiet=''
}


eget_help()
{
cat <<EOF

eget - wget-like downloader wrapper with wildcard support in the filename part of the URL
Usage: eget [options] http://somesite.ru/dir/na*.log

Options:
    -q|--quiet                - quiet mode
    --verbose                 - verbose mode
    --show-progress           - display progress bar even in quiet mode
    -k|--no-check-certificate - do not check the server's SSL certificate
    --no-content-disposition  - ignore the Content-Disposition header when naming the file
    -H|--header               - add <header> to the request (X-Cache:1 for example)
    -U|-A|--user-agent        - send a browser-like User-Agent
    --compressed              - request a compressed response and automatically decompress the content
    -4|--ipv4|--inet4-only    - use IPv4 only
    -6|--ipv6|--inet6-only    - use IPv6 only
    -O-|-O -                  - output downloaded file to stdout
    -O file                   - download to this file
    -P|--output-dir           - download to this directory

    -nd|--no-directories      - do not create a hierarchy of directories when retrieving recursively
    --no-glob                 - turn off file name globbing
    -c|--continue             - continue getting a partially-downloaded file
    --force|--allow-overwrite - force overwrite existing file
    -N|--timestamping         - only download if remote file is newer than local
    -i|--input-file FILE      - read URLs from FILE, one per line; each line can contain multiple mirror URLs (use - for stdin)
    -T|--timeout=N            - set the network timeout to N seconds
    --read-timeout=N          - set the read (and write) timeout to N seconds
    --retry-connrefused       - consider 'connection refused' a transient error and try again
    -t|--tries                - set the number of tries to N; specify 0 or 'inf' for infinite retrying
    --load-cookies file       - load cookies from file before the first HTTP retrieval
    --latest                  - print only the latest version of a file
    --second-latest           - print only the second-to-latest version of a file
    --allow-mirrors           - try mirrors from EGET_MIRRORS if the URL is not accessible
    --trust-server-names      - use the name specified by the redirection

    --list|--list-only        - print only URLs
    --check-url URL           - check if the URL exists (returns HTTP 200 OK)
    --check-site URL          - check if the site is accessible (returns HTTP 200 OK or 404 Not found)
    --get-response URL        - get the response with all headers (even if HEAD is not acceptable)
    --get-filename URL        - print filename for the URL (via Content-Disposition if applicable)
    --get-real-url URL        - print URL after all redirects
    --get-ipfs-cid URL        - print CID for URL (after all redirects)

Supported URLs:
  ftp:// http:// https:// file:/ ipfs:// rsync:// [user@]host:/path

Supported backends (set EGET_BACKEND=curl to select one):
  wget, curl and partially aria2c, axel, rsync

You can also set the EGET_OPTIONS variable with options to apply by default
Set EGET_MIRRORS to override the default mirrors for --allow-mirrors (default: eterfund mirrors)

Examples:
  $ eget http://ftp.somesite.ru/package-*.x64.tar
  $ eget http://ftp.somesite.ru/package *.tar
  $ eget https://github.com/owner/project package*.ext
  $ eget -O myname ipfs://QmVRUjnsnxHWkjq91KreCpUk4D9oZEbMwNQ3rzdjwND5dR
  $ eget --list http://ftp.somesite.ru/package-*.tar
  $ eget --check-url http://ftp.somesite.ru/test
  $ eget --list http://download.somesite.ru 'package-*.tar.xz'
  $ eget --list --latest https://github.com/telegramdesktop/tdesktop/releases 'tsetup.*.tar.xz'

EOF
}


if [ -z "$1" ] ; then
    echo "eget - wget like downloader wrapper with wildcard support, uses wget or curl as backend" >&2
    echo "Run $0 --help to get help" >&2
    exit 1
fi

__eget_parse_options()
{
local argument
local argvalue
local count="$#"
while [ -n "$1" ] ; do
    argument="$(echo "$1" | cut -d= -f1)"
    argvalue="$(echo "$1" | cut -s -d= -f2-)"
    case "$argument" in
        -h|--help)
            eget_help
            exit
            ;;
        -q|--quiet)
            set_quiet
            ;;
        --verbose)
            verbose="$1"
            ;;
        --show-progress)
            WGETSHOWPROGRESS='--show-progress'
            CURLSHOWPROGRESS='--progress-bar'
            ARIA2SHOWPROGRESS='--show-console-readout=true'
            AXELSHOWPROGRESS='1'
            ;;
        -k|--no-check-certificate)
            WGETNOSSLCHECK='--no-check-certificate'
            CURLNOSSLCHECK='-k'
            AXELNOSSLCHECK='--insecure'
            ;;
        --no-content-disposition)
            WGETNAMEOPTIONS=''
            CURLFILENAMEOPTIONS=''
            CURLNAMEOPTIONS=''
            AXELNAMEOPTIONS=''
            ;;
        -H|--header)
            # TODO: error if header value contains spaces
            if [ -z "$argvalue" ];then
                shift
                argvalue="$(printf '%s' "$1" | tr -d ' ')"
            fi
            [ -z "$argvalue" ] && fatal "Error: --header requires an argument"
            WGETHEADER="--header=$argvalue"
            CURLHEADER="--header $argvalue"
            AXELHEADER="--header=$argvalue"
            ;;
        -P|--output-dir)
            if [ -z "$argvalue" ];then
                shift
                argvalue="$1"
            fi
            [ -z "$argvalue" ] && fatal "Error: --output-dir requires an argument"
            USEOUTPUTDIR="$argvalue"
            ;;
        -U|-A|--user-agent)
            USERAGENT="Mozilla/5.0 (X11; Linux $arch) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36"
            ;;
        --compressed)
            CURLCOMPRESSED='--compressed'
            WGETCOMPRESSED='--compression=auto'
            ;;
        -4|--ipv4|--inet4-only)
            FORCEIPV="-4"
            ;;
        -6|--ipv6|--inet6-only)
            FORCEIPV="-6"
            ;;
        --list|--list-only)
            LISTONLY="$1"
            set_quiet
            ;;
        --check-url)
            CHECKURL="$1"
            #set_quiet
            ;;
        --check-site|--check)
            CHECKSITE="$1"
            #set_quiet
            ;;
        --get-filename)
            GETFILENAME="$1"
            ;;
        --get-response)
            GETRESPONSE="$1"
            ;;
        --get-real-url)
            GETREALURL="$1"
            ;;
        --get-ipfs-cid)
            GETIPFSCID="$1"
            ;;
        --latest)
            LATEST="$1"
            ;;
        --second-latest)
            SECONDLATEST="$1"
            ;;
        --allow-mirrors|--check-mirrors)
            CHECKMIRRORS="$1"
            ;;
        -O)
            shift
            TARGETFILE="$1"
            ;;
        -O-)
            TARGETFILE="-"
            ;;
        -nd|--no-directories)
            WGETNODIRECTORIES="$1"
            ;;
        --no-glob)
            NOGLOB="--no-glob"
            ;;
        -c|--continue)
            CONTINUE=1
            # curl: --continue-at and --remote-header-name cannot be combined
            CURLFILENAMEOPTIONS='--remote-name --remote-time'
            CURLNAMEOPTIONS='--remote-time'
            ;;
        --force|--allow-overwrite)
            FORCEOVERWRITE=1
            ;;
        -N|--timestamping)
            TIMESTAMPING="1"
            ;;
        -i|--input-file)
            if [ -z "$argvalue" ];then
                shift
                argvalue="$1"
            fi
            [ -z "$argvalue" ] && fatal "Error: --input-file requires an argument"
            INPUTFILE="$argvalue"
            ;;
        -T|--timeout)
            if [ -z "$argvalue" ];then
                shift
                argvalue="$1"
            fi
            [ -z "$argvalue" ] && fatal "Error: --timeout requires an argument"
            ! is_numeric "$argvalue" && fatal "Error: --timeout requires a numeric value, got '$argvalue'"
            WGETTIMEOUT="--timeout $argvalue"
            CURLMAXTIME="--max-time $argvalue"
            AXELTIMEOUT="--timeout=$argvalue"
            ;;
        --read-timeout)
            if [ -z "$argvalue" ];then
                shift
                argvalue="$1"
            fi
            [ -z "$argvalue" ] && fatal "Error: --read-timeout requires an argument"
            ! is_numeric "$argvalue" && fatal "Error: --read-timeout requires a numeric value, got '$argvalue'"
            WGETREADTIMEOUT="--read-timeout $argvalue"
            if [ -z "$CURLMAXTIME" ] ; then
                CURLMAXTIME="--max-time $argvalue"
            fi
            if [ -z "$AXELTIMEOUT" ] ; then
                AXELTIMEOUT="--timeout=$argvalue"
            fi
            ;;
        --retry-connrefused)
            WGETRETRYCONNREFUSED="$1"
            CURLRETRYCONNREFUSED="$1"
            ;;
        --trust-server-names)
            WGETRUSTSERVERNAMES="--trust-server-names"
            CURLTRUSTSERVERNAMES="-w '%{url_effective}'"
            ;;
        -t|--tries)
            if [ -z "$argvalue" ];then
                shift
                argvalue="$1"
            fi
            [ -z "$argvalue" ] && fatal "Error: --tries requires an argument"

            case "$argvalue" in
                0|inf)
                    CURLRETRY="--retry 1000"
                    WGETTRIES="--tries $argvalue"
                    ;;

                *)
                    WGETTRIES="--tries $argvalue"
                    CURLRETRY="--retry $(($argvalue-1))"
                    ;;
            esac
            ;;
        --load-cookies)
            if [ -z "$argvalue" ];then
                shift
                argvalue="$1"
            fi
            [ -z "$argvalue" ] && fatal "Error: --load-cookies requires an argument"
            WGETLOADCOOKIES="--load-cookies $argvalue"
            CURLCOOKIE="--cookie $argvalue"
            ;;
        -*)
            fatal "Unknown option '$1', check eget --help."
            ;;
        *)
            return $(($count-$#))
            ;;
    esac
    shift
done
return $(($count-$#))
}
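
# Example of the option splitting above (illustrative): for "--timeout=30" the
# cut calls yield argument="--timeout" and argvalue="30"; for the two-word form
# "--timeout 30" argvalue is empty at first, and the handler consumes the next
# positional argument with an extra shift.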

__eget_parse_options "$@"
# no more than 255 options (the function's return value, used here as the shift count, is an 8-bit value)
shift $?
__eget_parse_options $EGET_OPTIONS

###############################
# defaults

# https://github.com/ipfs/kubo/issues/5541
ipfs_diag_timeout='--timeout 20s'

ipfs_api_local="/ip4/127.0.0.1/tcp/5001"
[ -n "$EGET_IPFS_API" ] && ipfs_api_local="$EGET_IPFS_API"

ipfs_api_brave="/ip4/127.0.0.1/tcp/45005"

# Public IPFS http gateways
ipfs_gateways="https://dhash.ru/ipfs https://ipfs.io/ipfs https://gateway.pinata.cloud/ipfs https://dweb.link/ipfs"

# Test data: https://etersoft.ru/templates/etersoft/images/logo.png
ipfs_checkQm="QmYwf2GAMvHxfFiUFL2Mr6KUG6QrDiupqGc8ms785ktaYw"

get_ipfs_brave()
{
    local ipfs_brave="$(ls ~/.config/BraveSoftware/Brave-Browser/*/*/go-ipfs_* 2>/dev/null | sort | tail -n1)"
    [ -n "$ipfs_brave" ] && [ -x "$ipfs_brave" ] || return
    echo "$ipfs_brave"
}

ipfs_api_access()
{
    [ -n "$IPFS_CMD" ] || fatal "IPFS is disabled"
    if [ -n "$verbose" ] ; then
         verdocmd $IPFS_CMD --api $IPFS_API $ipfs_diag_timeout diag sys >/dev/null
    else
         verdocmd $IPFS_CMD --api $IPFS_API $ipfs_diag_timeout diag sys >/dev/null 2>/dev/null
    fi
}

ipfs_check()
{
    [ -n "$IPFS_CMD" ] || fatal "IPFS is disabled"
    verdocmd $IPFS_CMD --api $IPFS_API $ipfs_diag_timeout cat "$1" >/dev/null
}

check_ipfs_gateway()
{
    local ipfs_gateway="$1"
    # TODO: check checksum
    if docmd eget --check-url "$ipfs_gateway/$ipfs_checkQm" ; then
        ipfs_mode="gateway"
        return
    fi

    if docmd eget --check-site "$(dirname "$ipfs_gateway")" ; then
       info "IPFS gateway $ipfs_gateway is accessible, but can't return shared $ipfs_checkQm"
    else
       info "IPFS gateway $(dirname "$ipfs_gateway") is not accessible"
    fi

    return 1
}

select_ipfs_gateway()
{

    IPFS_GATEWAY=''

    # if some http gateway is set, use only it
    if [ -n "$EGET_IPFS_GATEWAY" ] ; then
        check_ipfs_gateway "$EGET_IPFS_GATEWAY" && IPFS_GATEWAY="$EGET_IPFS_GATEWAY" || ipfs_mode="disabled"
        return
    fi

    # check public http gateways
    for ipfs_gateway in $ipfs_gateways ; do
        check_ipfs_gateway $ipfs_gateway || continue
        IPFS_GATEWAY="$ipfs_gateway"
        return
    done

    ipfs_mode="disabled"
    return 1
}


select_ipfs_mode()
{
    IPFS_CMD="$(print_command_path ipfs)"
    if [ -n "$IPFS_CMD" ] ; then
        IPFS_API="$ipfs_api_local"
        if ipfs_api_access ; then
            ipfs_mode="local" && return
            #if ipfs_check "$ipfs_checkQm" ; then
            #    ipfs_mode="local" && return
            #else
            #    info "Skipped local: it is accessible via $IPFS_CMD --api $IPFS_API, but can't return shared $ipfs_checkQm"
            #fi
        fi
    fi

    # disabled: a browser is not meant for mass downloading
    IPFS_CMD="$(get_ipfs_brave)"
    # if no EGET_IPFS_API, check brave
    if false && [ -z "$EGET_IPFS_API" ] && [ -n "$IPFS_CMD" ] ; then
        IPFS_API="$ipfs_api_brave"
        if ipfs_api_access ; then
            ipfs_mode="brave" && return
            #if ipfs_check "$ipfs_checkQm" ; then
            #    ipfs_mode="brave" && return
            #else
            #    info "Skipped Brave: it is accessible via $IPFS_CMD --api $IPFS_API, but can't return shared $ipfs_checkQm"
            #fi
        fi
    fi

    select_ipfs_gateway
}


# Functions for working with the eget IPFS DB
get_cid_by_url()
{
    local URL="$1"
    [ -r "$EGET_IPFS_DB" ] || return
    is_fileurl "$URL" && return 1
    tac "$EGET_IPFS_DB" | grep -F "$URL Qm" | cut -f2 -d" " | grep -E "Qm[[:alnum:]]{44}" | head -n1
}

put_cid_and_url()
{
    local URL="$1"
    local CID="$2"
    local FN="$3"
    [ -w "$EGET_IPFS_DB" ] || return

    is_fileurl "$URL" && return

    local ac="$(get_url_by_cid "$CID")"
    if [ "$ac" = "$URL" ] ; then
        info "CID $CID already exists with the same URL $URL in IPFS DB, skipping"
        return
    fi
    echo "$URL $CID $FN" >> "$EGET_IPFS_DB"
    info "Placed in $EGET_IPFS_DB: $URL $CID $FN"
}

get_filename_by_cid()
{
    local CID="$1"
    [ -z "$EGET_IPFS_DB" ] && basename "$CID" && return
    tac "$EGET_IPFS_DB" | grep -F " $CID " | head -n1 | cut -f3 -d" "
}

get_url_by_cid()
{
    local CID="$1"
    [ -z "$EGET_IPFS_DB" ] && echo "$CID" && return
    tac "$EGET_IPFS_DB" | grep -F " $CID " | head -n1 | cut -f1 -d" "
}
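
# Example of the DB format used by the helpers above: put_cid_and_url appends
# one space-separated record per line, e.g. (illustrative values)
#   https://example.com/file.tar QmYwf2GAMvHxfFiUFL2Mr6KUG6QrDiupqGc8ms785ktaYw file.tar
# and the get_*_by_* helpers cut out field 1 (URL), field 2 (CID) or field 3
# (filename) of the newest matching record (tac + head -n1).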

###################


ipfs_mode="$EGET_IPFS"

# enable auto mode when $EGET_IPFS_DB is set
[ -z "$ipfs_mode" ] && [ -n "$EGET_IPFS_DB" ] && ipfs_mode="auto"

if [ -n "$LISTONLY$CHECKURL$CHECKSITE" ] ; then
    ipfs_mode=""
    EGET_IPFS_DB=''
fi


if [ -n "$ipfs_mode" ] && [ -n "$EGET_IPFS_DB" ] ; then
    ddb="$(dirname "$EGET_IPFS_DB")"
    if [ -d "$ddb" ] ; then
        info "Using eget IPFS db $EGET_IPFS_DB"
        [ -r "$EGET_IPFS_DB" ] || touch "$EGET_IPFS_DB"
    else
        EGET_IPFS_DB=''
    fi
fi


# detect if we run with ipfs:// or with auto
if is_ipfsurl "$1" && [ -z "$ipfs_mode" ] || [ "$ipfs_mode" = "auto" ] ; then
    info "Autodetecting available IPFS relay..."
    select_ipfs_mode
    info "Auto selected IPFS mode: $ipfs_mode"
    [ "$ipfs_mode" = "gateway" ] && info "Since the local ipfs service is not accessible, the http gateway will be used."
else
    [ "$ipfs_mode" = "gateway" ] && select_ipfs_gateway
    [ -n "$ipfs_mode" ] && info "IPFS mode: $ipfs_mode"
fi

IPFS_CMD=''

if [ "$ipfs_mode" = "disabled" ] ; then

ipfs_get()
{
    fatal "IPFS is disabled"
}

ipfs_put()
{
    fatal "IPFS is disabled"
}

ipfs_cat()
{
    fatal "IPFS is disabled"
}


elif [ "$ipfs_mode" = "brave" ] ; then
    IPFS_CMD="$(get_ipfs_brave)" || fatal "Can't find ipfs command in Brave"
    IPFS_PRETTY_CMD="~Brave-Browser/$(basename "$IPFS_CMD")"
    IPFS_API="$ipfs_api_brave"
    ipfs_api_access || fatal "Can't access to Brave IPFS API (Brave browser is not running and IPFS is not activated?)"
    info "Will use $IPFS_PRETTY_CMD --api $IPFS_API"

elif [ "$ipfs_mode" = "local" ] ; then
    IPFS_CMD="$(print_command_path ipfs)" || fatal "Can't find ipfs command"
    IPFS_PRETTY_CMD="$IPFS_CMD"
    IPFS_API="$ipfs_api_local"
    ipfs_api_access || fatal "Can't access the IPFS API (ipfs daemon is not running?)"
    info "Will use $IPFS_PRETTY_CMD --api $IPFS_API"

elif [ "$ipfs_mode" = "gateway" ] ; then
    info "Will use eget $IPFS_GATEWAY/HASH"

ipfs_get_real_url()
{
    [ -n "$IPFS_GATEWAY" ] || fatal "ipfs http gateway is not set"
    echo "$IPFS_GATEWAY/$1"
}

ipfs_get()
{
    if [ -n "$2" ] ; then
        docmd eget -O "$2" "$(ipfs_get_real_url "$1")"
    else
        docmd eget "$(ipfs_get_real_url "$1")"
    fi
}

ipfs_cat()
{
    # FIXME:
    ipfs_get "$1" "-"
}

ipfs_put()
{
    info "IPFS put skipped when a gateway is used"
    return 1
}
elif [ -z "$ipfs_mode" ] ; then
    :
else
    fatal "Unsupported eget ipfs mode $ipfs_mode"
fi

if [ -n "$IPFS_CMD" ] ; then

ipfs_get_real_url()
{
    return 1
}

ipfs_get()
{
    [ -n "$IPFS_CMD" ] || fatal "ipfs api is not usable"
    if [ -n "$2" ] ; then
        showcmd $IPFS_PRETTY_CMD --api $IPFS_API get -o "$2" "$1"
        $IPFS_CMD --api $IPFS_API get -o "$2" "$1"
    else
        showcmd $IPFS_PRETTY_CMD --api $IPFS_API get "$1"
        $IPFS_CMD --api $IPFS_API get "$1"
    fi
}

ipfs_put()
{
    [ -n "$IPFS_CMD" ] || fatal "ipfs api is not usable"

    # detect if -q is used (outputs a bare Qm hash instead of 'added Qm ...' lines)
    local qu="$1"
    [ "$qu" = "-q" ] || qu=''

    showcmd $IPFS_PRETTY_CMD --api $IPFS_API add "$@"

    local res
    res="$($IPFS_CMD --api $IPFS_API add "$@")" || return

    if [ -z "$qu" ] ; then
        res="$(echo "$res" | grep "^added Qm")" || return
        res="$(echo "$res" | cut -f2 -d" ")"
    fi

    is_ipfs_hash "$res" && echo "$res" && return
    fatal "Can't recognize $res IPFS hash"
}

ipfs_cat()
{
    [ -n "$IPFS_CMD" ] || fatal "ipfs api is not usable"
    showcmd $IPFS_PRETTY_CMD --api $IPFS_API cat "$1"
    $IPFS_CMD --api $IPFS_API cat "$1"
}

fi
###############################



WGET="$(print_command_path wget)"
CURL="$(print_command_path curl)"
ARIA2="$(print_command_path aria2c)" # the aria2 downloader binary is named aria2c
AXEL="$(print_command_path axel)"
RSYNC="$(print_command_path rsync)"

ORIG_EGET_BACKEND="$EGET_BACKEND"

# override backend
if is_fileurl "$1" ; then
    EGET_BACKEND="file"
elif is_ipfsurl "$1" ; then
    EGET_BACKEND="ipfs"
elif is_rsyncurl "$1" || is_sshurl "$1" ; then
    EGET_BACKEND="rsync"
fi

orig_EGET_BACKEND="$EGET_BACKEND"
EGET_BACKEND="$(basename "$EGET_BACKEND")"

case "$orig_EGET_BACKEND" in
    file|ipfs|rsync)
        ;;
    */wget)
        WGET="$orig_EGET_BACKEND"
        [ -x "$WGET" ] || fatal "There is no $orig_EGET_BACKEND in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install wget"
        ;;
    wget)
        [ -n "$WGET" ] || fatal "There is no wget in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install wget"
        ;;
    */curl)
        CURL="$orig_EGET_BACKEND"
        [ -x "$CURL" ] || fatal "There is no $orig_EGET_BACKEND in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install curl"
        ;;
    curl)
        [ -n "$CURL" ] || fatal "There is no curl in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install curl"
        ;;
    */aria2)
        ARIA2="$orig_EGET_BACKEND"
        [ -x "$ARIA2" ] || fatal "There is no $orig_EGET_BACKEND in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install aria2"
        ;;
    aria2)
        [ -n "$ARIA2" ] || fatal "There is no aria2 in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install aria2"
        ;;
    */axel)
        AXEL="$orig_EGET_BACKEND"
        [ -x "$AXEL" ] || fatal "There is no $orig_EGET_BACKEND in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install axel"
        ;;
    axel)
        [ -n "$AXEL" ] || fatal "There is no axel in the system, but you forced its use via EGET_BACKEND. Install it with $ epm install axel"
        ;;
    '')
        [ -n "$WGET" ] && EGET_BACKEND="wget"
        [ -z "$EGET_BACKEND" ] && [ -n "$CURL" ] && EGET_BACKEND="curl"
        [ -n "$EGET_BACKEND" ] || fatal "There is neither wget nor curl in the system. Install one of them with $ epm install wget"
        ;;
    *)
        fatal "Unknown EGET_BACKEND $EGET_BACKEND"
        ;;
esac


# Wrapper for wget command with all configured options
# Defined globally so it can be used by aria2/axel backends for header operations
__wget()
{
    [ -n "$USERAGENT" ] && set -- -U "$USERAGENT" "$@"
    [ -n "$USEOUTPUTDIR" ] && set -- -P "$USEOUTPUTDIR" "$@"
    docmd $WGET $FORCEIPV $WGETQ $WGETSHOWPROGRESS $NOGLOB $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK $WGETNODIRECTORIES $WGETTIMEOUT $WGETREADTIMEOUT $WGETRETRYCONNREFUSED $WGETTRIES $WGETLOADCOOKIES $WGETRUSTSERVERNAMES $EGET_WGET_OPTIONS "$@"
}

# wget wrapper for downloads (adds -c/-N flags)
__wget_download()
{
    [ -n "$CONTINUE" ] && set -- -c "$@"
    [ -n "$TIMESTAMPING" ] && set -- -N "$@"
    __wget "$@"
}

# Helper function for timestamping with aria2/axel (needs to be outside backend blocks)
__timestamping_download()
{
    local URL="$1"
    local TARGETFILE="$2"
    local DOWNLOAD_CMD="$3"  # will be "__aria2 ..." or "__axel ..."

    # If file doesn't exist, just download
    [ ! -f "$TARGETFILE" ] && eval "$DOWNLOAD_CMD" && return

    # Get Last-Modified header
    local remote_modified="$(url_get_header "$URL" "Last-Modified")"

    if [ -z "$remote_modified" ] ; then
        info "Warning: Server did not provide Last-Modified header, downloading anyway"
        eval "$DOWNLOAD_CMD"
        return
    fi

    # Convert HTTP date to unix timestamp
    local remote_timestamp="$(date -d "$remote_modified" '+%s' 2>/dev/null)"
    if [ -z "$remote_timestamp" ] ; then
        info "Warning: Could not parse Last-Modified date '$remote_modified', downloading anyway"
        eval "$DOWNLOAD_CMD"
        return
    fi

    # Get local file timestamp
    local local_timestamp="$(stat -c '%Y' "$TARGETFILE" 2>/dev/null)"
    [ -z "$local_timestamp" ] && local_timestamp=0

    # Compare timestamps
    if [ "$remote_timestamp" -gt "$local_timestamp" ] ; then
        info "Remote file is newer, downloading..."
        eval "$DOWNLOAD_CMD" || return 1
        # Set timestamp from server
        touch -d "$remote_modified" "$TARGETFILE" 2>/dev/null
    else
        info "Local file is up to date, skipping download"
    fi
}
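
# Example of the timestamp comparison above (illustrative header value):
#   date -d "Wed, 21 Oct 2015 07:28:00 GMT" '+%s'   # -> 1445412480
# is compared with "stat -c '%Y' file" (local mtime); the file is re-downloaded
# only when the remote timestamp is greater.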

if [ "$EGET_BACKEND" = "file" ] ; then

# put remote content to stdout
url_scat()
{
    local URL="$1"
    cat "$(path_from_url "$URL")"
}

# download to default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       cp -av "$(path_from_url "$URL")" "$2"
       return
    fi
    cp -av "$(path_from_url "$URL")" .
}

url_pget()
{
    #[ -n "$USEOUTPUTDIR" ] || fatal "USEOUTPUTDIR is not set"
    local URL
    for URL in "$@" ; do
        cp -av "$(path_from_url "$URL")" "$USEOUTPUTDIR"
    done
}

url_check_accessible()
{
    local URL="$1"
    test -f "$(path_from_url "$URL")"
}

url_check_available()
{
    local URL="$1"
    test -f "$(path_from_url "$URL")"
}

url_get_filename()
{
    basename "$1"
}

url_get_real_url()
{
    echo "$1"
}

elif [ "$EGET_BACKEND" = "ipfs" ] ; then

# put remote content to stdout
url_scat()
{
    local URL="$1"
    ipfs_cat "$(cid_from_url "$URL")"
}

# download to default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       ipfs_get "$(cid_from_url "$URL")" "$2"
       return
    fi

    local fn="$(url_print_filename_from_url "$URL")"
    if [ -n "$fn" ] ; then
       ipfs_get "$(cid_from_url "$URL")" "$fn"
       return
    fi

    ipfs_get "$(cid_from_url "$URL")"
}

url_pget()
{
    #[ -n "$USEOUTPUTDIR" ] || fatal "USEOUTPUTDIR is not set"
    local URL
    for URL in "$@" ; do
        local fn="$(url_print_filename_from_url "$URL")"
        if [ -z "$fn" ] ; then
           fn="$(basename "$URL")"
        fi
        ipfs_get "$(cid_from_url "$URL")" "$USEOUTPUTDIR/$fn"
    done
}

url_check_accessible()
{
    local URL="$1"
    # TODO: improve me
    scat "$URL" >/dev/null
}

url_check_available()
{
    local URL="$1"
    # TODO: improve me
    scat "$URL" >/dev/null
}

url_print_filename_from_url()
{
    local URL="$1"
    local fn="$(echo "$URL" | sed -e 's|ipfs://.*\?filename=||')"
    [ "$URL" != "$fn" ] && echo "$fn" && return
}

url_get_filename()
{
    local URL="$1"
    url_print_filename_from_url "$URL" && return
    local CID="$(cid_from_url "$URL")"
    get_filename_by_cid "$CID"
}

url_get_real_url()
{
    local URL="$1"
    local CID="$(cid_from_url "$URL")"
    # if we use gateway, return URL with gateway
    ipfs_get_real_url "$URL" && return
    get_url_by_cid "$CID"
}


elif [ "$EGET_BACKEND" = "wget" ] ; then

# put remote content to stdout
url_scat()
{
    local URL="$1"
    download_with_mirroring __wget "$URL" -O- && return
    local RES=$?
    [ -n "$quiet" ] || return $RES
    unset_quiet
    download_with_mirroring __wget "$URL" -O-
}

# download to default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       download_with_mirroring __wget_download "$URL" -O "$2"
       return
    fi
# TODO: rsync support for known hosts?
# Do not download if the size and date are the same
# -nc
# TODO: overwrite always
    download_with_mirroring __wget_download "$URL" $WGETNAMEOPTIONS
}

url_pget()
{
    #[ -n "$USEOUTPUTDIR" ] || fatal "USEOUTPUTDIR is not set"
    download_with_mirroring __wget_download $WGETNAMEOPTIONS "$@"
}

# Download file with mirror fallback (try each URL until success)
sget_with_mirrors()
{
    local URL
    for URL in "$@" ; do
        __wget_download $WGETNAMEOPTIONS "$URL" && return 0
    done
    return 1
}

url_get_response()
{
    local URL="$1"
    local answer
    answer="$(quiet=1 __wget --timeout 20 --tries 1 --spider -S "$URL" 2>&1)"
    # HTTP/1.1 405 Method Not Allowed
    # HTTP/1.1 404 Not Found
    if echo "$answer" | grep -q "^ *HTTP/[12.]* 40[45]" ; then
        (quiet=1 __wget -O/dev/null --header="Range: bytes=0-0" -S "$URL" 2>&1)
        return
    fi
    echo "$answer"
}


elif [ "$EGET_BACKEND" = "curl" ] ; then

__curl()
{
    [ -n "$USERAGENT" ] && set -- -A "$USERAGENT" "$@"
    [ -n "$USEOUTPUTDIR" ] && set -- --create-dirs --output-dir "$USEOUTPUTDIR" "$@"
    docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLSHOWPROGRESS $CURLCOMPRESSED $CURLHEADER $CURLNOSSLCHECK $CURLMAXTIME $CURLRETRYCONNREFUSED $CURLRETRY $CURLCOOKIE $CURLTRUSTSERVERNAMES $EGET_CURL_OPTIONS "$@"
}

# curl wrapper for downloads (adds -C - for continue)
__curl_download()
{
    [ -n "$CONTINUE" ] && set -- -C - "$@"
    __curl "$@"
}

# put remote content to stdout
url_scat()
{
    local URL="$1"
    download_with_mirroring __curl "$URL" --output - && return
    local RES=$?
    [ -n "$quiet" ] || return $RES
    unset_quiet
    download_with_mirroring __curl "$URL" --output -
}

# Helper to check Last-Modified for curl timestamping
__curl_check_timestamp()
{
	local URL="$1"
	[ -n "$quiet" ] && return 0  # Skip check in quiet mode

	local last_modified="$(url_get_header "$URL" "Last-Modified")"
	if [ -z "$last_modified" ] ; then
		info "Warning: Server did not provide Last-Modified header, time-stamps turned off"
	fi
}

# download to default name or to $2
url_sget()
{
    local URL="$1"
    local res
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$1"
       return
    elif [ -n "$2" ] ; then
       if [ -n "$TIMESTAMPING" ] && [ -f "$2" ] ; then
           __curl_check_timestamp "$URL"
           download_with_mirroring __curl_download "$URL" -z "$2" --output "$2"
       else
           download_with_mirroring __curl_download "$URL" --output "$2"
       fi
       return
    fi

    local FILENAME="$(url_get_filename "$URL")"
    if [ -n "$FILENAME" ] ; then
        if [ -n "$TIMESTAMPING" ] && [ -f "$FILENAME" ] ; then
            __curl_check_timestamp "$URL"
            download_with_mirroring __curl_download "$URL" -z "$FILENAME" $CURLNAMEOPTIONS --output "$FILENAME"
        else
            download_with_mirroring __curl_download "$URL" $CURLNAMEOPTIONS --output "$FILENAME"
        fi
        return
    fi

    if [ -n "$TIMESTAMPING" ] ; then
        # Need to get filename first to use -z
        FILENAME="$(basename "$URL")"
        if [ -f "$FILENAME" ] ; then
            __curl_check_timestamp "$URL"
            download_with_mirroring __curl_download "$URL" -z "$FILENAME" $CURLFILENAMEOPTIONS
        else
            download_with_mirroring __curl_download "$URL" $CURLFILENAMEOPTIONS
        fi
    else
        download_with_mirroring __curl_download "$URL" $CURLFILENAMEOPTIONS
    fi
}

url_pget()
{
    #[ -n "$USEOUTPUTDIR" ] || fatal "USEOUTPUTDIR is not set"
    # curl needs -O before each URL for parallel downloads
    local args=""
    local URL
    for URL in "$@" ; do
        args="$args $CURLFILENAMEOPTIONS $URL"
    done
    __curl_download --parallel $args
}

# Download file with mirror fallback (try each URL until success)
sget_with_mirrors()
{
    local URL
    for URL in "$@" ; do
        __curl_download $CURLFILENAMEOPTIONS "$URL" && return 0
    done
    return 1
}

url_get_response()
{
    local URL="$1"
    local answer
    # Don't use -C - for header requests
    answer="$(quiet=1 __curl --max-time 20 --retry 0 -LI "$URL" 2>&1)"
    # HTTP/1.1 405 Method Not Allowed
    # HTTP/1.1 404 Not Found
    if echo "$answer" | grep -q "^ *HTTP/[12.]* 40[45]" ; then
        (quiet=1 __curl --max-time 20 --retry 0 -L -i -r0-0 "$URL" 2>&1)
        return
    fi
    echo "$answer"
}

elif [ "$EGET_BACKEND" = "aria2" ] ; then
__aria2()
{
    [ -n "$USEOUTPUTDIR" ] && set -- -d "$USEOUTPUTDIR" "$@"
    docmd $ARIA2 $ARIA2Q $ARIA2SHOWPROGRESS $EGET_ARIA2_OPTIONS "$@"
}

# aria2 wrapper for downloads (adds --continue)
__aria2_download()
{
    [ -n "$CONTINUE" ] && set -- --continue=true "$@"
    __aria2 "$@"
}

# put remote content to stdout
url_scat()
{
    # aria2 -o - creates file named "-", not stdout output, fallback to wget
    local URL="$1"
    download_with_mirroring __wget "$URL" -O- && return
    local RES=$?
    [ -n "$quiet" ] || return $RES
    unset_quiet
    download_with_mirroring __wget "$URL" -O-
}

# download to default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       if [ -n "$TIMESTAMPING" ] ; then
           __timestamping_download "$URL" "$2" "download_with_mirroring __aria2_download -x1 -s1 --allow-piece-length-change=false --allow-overwrite=true -o \"$2\" \"$URL\""
       else
           download_with_mirroring __aria2_download -x1 -s1 --allow-piece-length-change=false --allow-overwrite=true -o "$2" "$URL"
       fi
       return
    fi

    # No explicit output file
    if [ -n "$TIMESTAMPING" ] ; then
        # Get filename first for timestamping
        local FILENAME="$(url_get_filename "$URL")"
        [ -z "$FILENAME" ] && FILENAME="$(basename "$URL")"
        __timestamping_download "$URL" "$FILENAME" "download_with_mirroring __aria2_download --allow-overwrite=true \"$URL\""
    else
        download_with_mirroring __aria2_download --allow-overwrite=true "$URL"
    fi
}

url_pget()
{
    #[ -n "$USEOUTPUTDIR" ] || fatal "USEOUTPUTDIR is not set"
    echo "$@" | xargs -n1 | download_with_mirroring __aria2_download --allow-overwrite=true -i-
}

# Download file from multiple mirrors simultaneously (TAB-separated for aria2)
sget_with_mirrors()
{
    # aria2 supports multiple URLs for same file separated by TAB
    local tab_urls="$(echo "$@" | tr ' ' '\t')"
    echo "$tab_urls" | __aria2_download -i-
}

# use __wget for headers (aria2/axel don't support this natively)
url_get_response()
{
    local URL="$1"
    local answer
    answer="$(quiet=1 __wget --timeout 20 --tries 1 --spider -S "$URL" 2>&1)"
    # HTTP/1.1 405 Method Not Allowed
    # HTTP/1.1 404 Not Found
    if echo "$answer" | grep -q "^ *HTTP/[12.]* 40[45]" ; then
        (quiet=1 __wget -O/dev/null --header="Range: bytes=0-0" -S "$URL" 2>&1)
        return
    fi
    echo "$answer"
}

elif [ "$EGET_BACKEND" = "axel" ] ; then
__axel()
{
    local AXELQ_LOCAL="$AXELQ"
    # --show-progress overrides --quiet for axel
    [ -n "$AXELSHOWPROGRESS" ] && AXELQ_LOCAL=''
    [ -n "$USERAGENT" ] && set -- --user-agent="$USERAGENT" "$@"
    # Note: axel doesn't support output directory, only output file (-o)
    docmd $AXEL $FORCEIPV $AXELQ_LOCAL $AXELTIMEOUT $AXELHEADER $AXELNOSSLCHECK $EGET_AXEL_OPTIONS "$@"
}

# axel wrapper for downloads
# Note: axel auto-continues if state file (.st) exists, no explicit flag needed
__axel_download()
{
    __axel "$@"
}

# Remove target file and state file before download (axel can't overwrite)
__axel_clean()
{
    local target="$1"
    rm -f "$target" "$target.st" 2>/dev/null
}

# put remote content to stdout
url_scat()
{
    # axel doesn't support stdout output, fallback to wget
    local URL="$1"
    download_with_mirroring __wget "$URL" -O- && return
    local RES=$?
    [ -n "$quiet" ] || return $RES
    unset_quiet
    download_with_mirroring __wget "$URL" -O-
}

# download to default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       if [ -n "$TIMESTAMPING" ] ; then
           __timestamping_download "$URL" "$2" "download_with_mirroring __axel_download --alternate -o \"$2\" \"$URL\""
       else
           # axel can't overwrite, clean first
           __axel_clean "$2"
           download_with_mirroring __axel_download --alternate -o "$2" "$URL"
       fi
       return
    fi

    # No explicit output file - get filename from URL
    local FILENAME="$(url_get_filename "$URL")"
    [ -z "$FILENAME" ] && FILENAME="$(basename "$URL")"
    if [ -n "$TIMESTAMPING" ] ; then
        __timestamping_download "$URL" "$FILENAME" "download_with_mirroring __axel_download --alternate \"$URL\""
    else
        # axel can't overwrite, clean first
        __axel_clean "$FILENAME"
        download_with_mirroring __axel_download --alternate "$URL"
    fi
}

url_pget()
{
    #[ -n "$USEOUTPUTDIR" ] || fatal "USEOUTPUTDIR is not set"
    # axel doesn't support output directory, use cd workaround
    local URL FILENAME
    local oldpwd="$PWD"
    cd "$USEOUTPUTDIR" || return 1
    for URL in "$@" ; do
        # axel can't overwrite, clean first
        FILENAME="$(basename "$URL")"
        __axel_clean "$FILENAME"
        download_with_mirroring __axel_download --alternate "$URL"
    done
    cd "$oldpwd"
}

# Download file from multiple mirrors simultaneously (axel supports multiple URLs)
sget_with_mirrors()
{
    # axel doesn't support output directory, use cd workaround
    local oldpwd="$PWD"
    [ -n "$USEOUTPUTDIR" ] && { cd "$USEOUTPUTDIR" || return 1; }
    # axel can't overwrite, clean first (all URLs point to same file)
    __axel_clean "$(basename "$1")"
    __axel_download --alternate "$@"
    [ -n "$USEOUTPUTDIR" ] && cd "$oldpwd"
}

# use __wget for headers (aria2/axel don't support this natively)
url_get_response()
{
    local URL="$1"
    local answer
    answer="$(quiet=1 __wget --timeout 20 --tries 1 --spider -S "$URL" 2>&1)"
    # HTTP/1.1 405 Method Not Allowed
    # HTTP/1.1 404 Not Found
    if echo "$answer" | grep -q "^ *HTTP/[12.]* 40[45]" ; then
        (quiet=1 __wget -O/dev/null --header="Range: bytes=0-0" -S "$URL" 2>&1)
        return
    fi
    echo "$answer"
}

elif [ "$EGET_BACKEND" = "rsync" ] ; then

__rsync()
{
    local opts=""
    [ -n "$quiet" ] && opts="$opts -q"
    [ -n "$verbose" ] && opts="$opts -v"
    [ -n "$TIMESTAMPING" ] && opts="$opts -u"
    docmd $RSYNC $opts $EGET_RSYNC_OPTIONS "$@"
}

# put remote content to stdout
url_scat()
{
    local URL="$1"
    # rsync can't output to stdout, use temp file
    local tmpfile="$(mktemp)"
    __rsync "$URL" "$tmpfile" && cat "$tmpfile"
    local res=$?
    rm -f "$tmpfile"
    return $res
}

# download to default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
        scat "$URL"
        return
    elif [ -n "$2" ] ; then
        __rsync "$URL" "$2"
        return
    fi
    # Download to current directory with original name
    __rsync "$URL" .
}

url_pget()
{
    local URL
    local destdir="${USEOUTPUTDIR:-.}"
    for URL in "$@" ; do
        __rsync "$URL" "$destdir/"
    done
}

# rsync doesn't support HTTP headers
url_get_response()
{
    warning "rsync:// does not support HTTP headers"
    return 1
}

url_get_filename()
{
    basename "$1"
}

url_get_real_url()
{
    echo "$1"
}

else
    fatal "Unknown EGET_BACKEND '$EGET_BACKEND', logical error."
fi


# Common code for the wget, curl, aria2 and axel backends (HTTP related)
if [ "$EGET_BACKEND" = "wget" ] || [ "$EGET_BACKEND" = "curl" ] || [ "$EGET_BACKEND" = "aria2" ] || [ "$EGET_BACKEND" = "axel" ] ; then

url_get_headers()
{
    local URL="$1"
    url_get_response "$URL" | grep -i "^ *[[:alpha:]].*: " | sed -e 's|^ *||' -e 's|\r$||'
}

url_check_accessible()
{
    local URL="$1"
    url_get_response "$URL" | grep "HTTP/[0-9]\.[0-9] [0-9]" | tail -n1 | grep -q -w "200\|404"
}

url_check_available()
{
    local URL="$1"
    url_get_response "$URL" | grep "HTTP/[0-9]\.[0-9] [0-9]" | tail -n1 | grep -q -w "200"
}

url_get_header()
{
    local URL="$1"
    local HEADER="$2"
    url_get_headers "$URL" | grep -i "^ *$HEADER: " | sed -e "s|^ *$HEADER: ||i"
}
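
# Example (illustrative): url_get_header "$URL" "Last-Modified" prints just the
# header value, e.g. "Wed, 21 Oct 2015 07:28:00 GMT", with the case-insensitive
# "Last-Modified: " prefix stripped.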

url_get_raw_real_url()
{
    local URL="$1"

    ! is_httpurl "$URL" && echo "$URL" && return

    # don't check Location if we already have the final (made) form of the URL
    [ -n "$MADEURL" ] && [ "$MADEURL" = "$URL" ] && echo "$URL" && return

    local loc
    for loc in $(url_get_header "$URL" "Location" | tac | sed -e 's| .*||') ; do
        # add the protocol if it is missing
        if echo "$loc" | grep -q '^//' ; then
            loc="$(echo "$URL" | sed -e 's|//.*||')$loc"
        fi
        # hack to construct a full URL from a relative Location
        if is_abs_path "$loc" ; then
            loc="$(concatenate_url_and_filename "$(get_host_only "$URL")" "$loc")" #"
        fi
        echo "$loc"
        return
    done

    echo "$URL"
}
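
# Examples of the Location handling above (illustrative): for the URL
# https://example.com/get, a protocol-relative "Location: //cdn.example.com/f"
# becomes https://cdn.example.com/f (the scheme is taken from the original URL),
# and an absolute-path "Location: /pub/f" becomes https://example.com/pub/f.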

url_get_real_url()
{
    local URL="$1"
    local loc
    loc="$(url_get_raw_real_url "$URL")"

    # we need to stay with the original URL because of redirect tags (query parameters)
    if ! is_strange_url "$loc" ; then
        echo "$loc"
        return
    fi

    echo "$URL"
}

url_get_filename()
{
    local URL="$1"

    ! is_httpurl "$URL" && basename "$URL" && return

    local filename

    # See https://www.cpcwood.com/blog/5-aws-s3-utf-8-content-disposition
    # https://www.rfc-editor.org/rfc/rfc6266
    local cd="$(url_get_header "$URL" "Content-Disposition")"
    if echo "$cd" | grep -qi "filename\*= *UTF-8" ; then
        #Content-Disposition: attachment; filename="unityhub-amd64-3.3.0.deb"; filename*=UTF-8''"unityhub-amd64-3.3.0.deb"
        #Content-Disposition: attachment; filename*=UTF-8''t1client-standalone-4.5.28.0-1238402-Release.deb; filename="t1client-standalone-4.5.28.0-1238402-Release.deb"
        filename="$(echo "$cd" | sed -e "s|.*filename\*= *UTF-8''||i" -e 's|^"||' -e 's|";$||' -e 's|"$||' -e 's|; filename=.*||')"
        [ "$filename" != "unspecified" ] && echo "$filename" && return
    fi
    if echo "$cd" | grep -qi "filename=" ; then
        #Content-Disposition: attachment; filename=postman-linux-x64.tar.gz
        #content-disposition: attachment; filename="code-1.77.1-1680651749.el7.x86_64.rpm"
        filename="$(echo "$cd" | sed -e 's|.*filename= *||i' -e 's|^"||' -e 's|";.*||' -e 's|"$||')"
        [ "$filename" != "unspecified" ] && echo "$filename" && return
    fi

    local loc="$(url_get_raw_real_url "$URL")"
    if is_strange_url "$loc" ; then
        loc="$(echo "$loc" | sed -e "s|\?.*||")"
    fi

    # hack for redirect to the main page
    if dirname "$loc" | grep -q "^http" ; then
        loc=""
    fi

    # If real URL resolution failed, fallback to original URL
    [ -n "$loc" ] || loc="$URL"

    if is_strange_url "$loc" ; then
        loc="$(echo "$loc" | sed -e "s|\?.*||")"
    fi

    [ -n "$loc" ] || return
    filename="$(basename "$loc")"
    [ "$filename" = "redirect" ] && return 1
    echo "$filename"
}

fi


if [ -n "$ipfs_mode" ] && [ -n "$EGET_IPFS_DB" ] &&  ! is_ipfsurl "$1"  ; then

download_to_ipfs()
{
    local URL="$1"
    local res
    #res="$(url_scat "$URL" | ipfs_put )" || return
    #res="$(echo "$res" | grep "^added Qm")" || return 1
    #CID="$(echo "$res" | cut -f2 -d" ")"
    # with -q to disable progress (mixed with download progress)
    res="$(url_scat "$URL" | ipfs_put -q)" || return
    is_ipfs_hash "$res" || return 1
    echo "$res"
}

# put remote content to stdout
scat()
{
    local URL="$1"
    url_scat "$URL"

    # It is used as a list-only function here, so don't save to IPFS
    # (the caching code below is intentionally disabled)
    return

    ###################

    local CID="$(get_cid_by_url "$URL")"
    if [ -n "$CID" ] && [ -z "$EGET_IPFS_FORCE_LOAD" ] ; then
        info "$URL -> $CID"
        ipfs_cat "$CID"
        return
    fi

    CID="$(download_to_ipfs "$URL")" || return

    ipfs_cat "$CID" || return

    local FN="$(url_get_filename "$URL")" || return

    put_cid_and_url "$URL" "$CID" "$FN"
}

# download to default name or to $2
sget()
{
    local URL="$1"
    local TARGET="$2"

    if [ -n "$GETFILENAME" ] ; then
        get_filename "$URL"
        exit
    fi

    local REALURL="$(get_real_url "$URL")" || return

    if [ -n "$GETREALURL" ] ; then
        echo "$REALURL"
        exit
    fi

    # skip ipfs for cat
    if [ "$TARGET" = "/dev/stdout" ] || [ "$TARGET" = "-" ] ; then
       url_scat "$URL"
       return
    fi

    # Check if target file exists (unless --force, -c, or -N is used)
    if [ -n "$TARGET" ] && [ -f "$TARGET" ] ; then
        if [ -n "$TIMESTAMPING" ] ; then
            # -N: handled by backend timestamping logic
            :
        elif [ -n "$CONTINUE" ] ; then
            # -c: pass to backend for resume
            :
        elif [ -n "$FORCEOVERWRITE" ] ; then
            # --force: will overwrite
            :
        else
            fatal "File '$TARGET' already exists. Use --force to overwrite, -c to continue, or -N for timestamping."
        fi
    fi

    #if is_strange_url "$REALURL" ; then
    #    info "Just download strange URL $REALURL, skipping IPFS"
    #    url_sget "$REALURL" "$TARGET"
    #    return
    #fi

    local CID="$(get_cid_by_url "$REALURL")"
    if [ -n "$CID" ] && [ -z "$EGET_IPFS_FORCE_LOAD" ] ; then

        if [ -n "$GETIPFSCID" ] ; then
            echo "$CID"
            exit
        fi

        if [ -n "$GETFILENAME" ] ; then
            get_filename_by_cid "$CID"
            exit
        fi

        if [ -n "$GETREALURL" ] ; then
            get_url_by_cid "$CID"
            exit
        fi

        if [ -z "$TARGET" ] ; then
            # TODO: in some cases we can get name from URL...
            TARGET="$(get_filename_by_cid "$CID")"
            if [ -z "$TARGET" ] ; then
                TARGET="$CID"
            fi
            # Check if target file exists (TARGET was just determined from CID)
            if [ -f "$TARGET" ] ; then
                if [ -z "$TIMESTAMPING" ] && [ -z "$CONTINUE" ] && [ -z "$FORCEOVERWRITE" ] ; then
                    fatal "File '$TARGET' already exists. Use --force to overwrite, -c to continue, or -N for timestamping."
                fi
            fi
        fi
        [ "$URL" = "$REALURL" ] && info "$URL -> $CID -> $TARGET" || info "$URL -> $REALURL -> $CID -> $TARGET"
        ipfs_get "$CID" "$TARGET" && return

        # fail get from IPFS, fallback
        url_sget "$REALURL" "$TARGET"
        return
    fi


    # download and put to IPFS
    local FN="$(url_get_filename "$REALURL")" || return
    if [ -z "$TARGET" ] ; then
        TARGET="$FN"
        # Check if target file exists (TARGET was just determined from URL)
        if [ -f "$TARGET" ] ; then
            if [ -z "$TIMESTAMPING" ] && [ -z "$CONTINUE" ] && [ -z "$FORCEOVERWRITE" ] ; then
                fatal "File '$TARGET' already exists. Use --force to overwrite, -c to continue, or -N for timestamping."
            fi
        fi
    fi

    if [ -n "$GETIPFSCID" ] ; then
         # download, add to IPFS and print out the CID
         CID="$(download_to_ipfs "$REALURL")" || return
         echo "$CID"
         exit
    fi

    # download file and add to IPFS
    url_sget "$REALURL" "$TARGET" || return

    # don't do ipfs put when a gateway is used
    [ "$ipfs_mode" = "gateway" ] && return

    CID="$(ipfs_put --progress "$TARGET")" || return

    put_cid_and_url "$REALURL" "$CID" "$FN"
}

check_url_is_available()
{
    local URL="$1"
    local REALURL="$(get_real_url "$URL")" || return
    local CID="$(get_cid_by_url "$REALURL")"
    if [ -n "$CID" ] ; then
        [ "$URL" = "$REALURL" ] && info "$URL -> $CID" || info "$URL -> $REALURL -> $CID"
        ipfs_check "$CID"
        return
    fi

    CID="$(download_to_ipfs "$REALURL")" || return

    local FN="$(url_get_filename "$REALURL")" || return
    ipfs_cat "$CID" >/dev/null || return
    put_cid_and_url "$REALURL" "$CID" "$FN"
}

check_url_is_accessible()
{
    check_url_is_available "$@"
}

get_filename()
{
    url_get_filename "$1"
}

get_real_url()
{
    url_get_real_url "$1"
}

else
scat()
{
    url_scat "$@"
}

sget()
{
    local URL="$1"
    local TARGET="$2"

    if [ -n "$GETFILENAME" ] ; then
        get_filename "$URL"
        exit
    fi

    if [ -n "$GETREALURL" ] ; then
        get_real_url "$URL"
        exit
    fi

    # Skip check for stdout
    if [ "$TARGET" = "/dev/stdout" ] || [ "$TARGET" = "-" ] ; then
        url_sget "$@"
        return
    fi

    # Check if target file exists (unless --force, -c, or -N is used)
    # When TARGET is not specified, determine it from URL
    local TARGET_FROM_URL=""
    if [ -z "$TARGET" ] ; then
        TARGET_FROM_URL="$(url_get_filename "$URL")"
        TARGET="$TARGET_FROM_URL"
    fi
    if [ -n "$TARGET" ] && [ -f "$TARGET" ] ; then
        if [ -z "$TIMESTAMPING" ] && [ -z "$CONTINUE" ] && [ -z "$FORCEOVERWRITE" ] ; then
            fatal "File '$TARGET' already exists. Use --force to overwrite, -c to continue, or -N for timestamping."
        fi
        # With --force and no explicit -O, delete file so wget won't create .1
        if [ -n "$FORCEOVERWRITE" ] && [ -n "$TARGET_FROM_URL" ] ; then
            rm -f "$TARGET"
        fi
    fi

    url_sget "$@"
}

# Read URLs from file, skip empty lines and comments
read_urls_from_file()
{
	local file="$1"
	local line

	if [ "$file" = "-" ] ; then
		# Read from stdin
		while IFS= read -r line ; do
			line="$(echo "$line" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')"
			[ -z "$line" ] && continue
			echo "$line" | grep -q '^[[:space:]]*#' && continue
			echo "$line"
		done
	else
		# Read from file
		[ ! -f "$file" ] && fatal "Error: input file '$file' not found"
		[ ! -r "$file" ] && fatal "Error: input file '$file' is not readable"

		while IFS= read -r line ; do
			line="$(echo "$line" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')"
			[ -z "$line" ] && continue
			echo "$line" | grep -q '^[[:space:]]*#' && continue
			echo "$line"
		done < "$file"
	fi
}
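
# Example input file for -i|--input-file (illustrative): one target per line,
# optional mirror URLs separated by spaces, comments and blank lines skipped:
#   # toolchain packages
#   https://example.com/a.tar https://mirror.example.org/a.tar
#   https://example.com/b.tar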

pget()
{
    url_pget "$@"
}

check_url_is_accessible()
{
    url_check_accessible "$@"
}

check_url_is_available()
{
    url_check_available "$@"
}

get_filename()
{
    url_get_filename "$1"
}

get_real_url()
{
    url_get_real_url "$1"
}

fi


get_github_urls()
{
    # https://github.com/OWNER/PROJECT
    local owner="$(echo "$1" | sed -e "s|^https://github.com/||" -e "s|/.*||")" #"
    local project="$(echo "$1" | sed -e "s|^https://github.com/$owner/||" -e "s|/.*||")" #"
    [ -n "$owner" ] || fatal "Can't get owner from $1"
    [ -n "$project" ] || fatal "Can't get project from $1"
    local URL="https://api.github.com/repos/$owner/$project/releases"
    # the API sometimes returns unformatted JSON
    scat "$URL" | sed -e 's|,\(["{]\)|,\n\1|g' | \
        grep -i -o -E '"browser_download_url": *"https://.*"' | cut -d'"' -f4
}
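
# Example (illustrative): for https://github.com/owner/project the function
# queries https://api.github.com/repos/owner/project/releases and prints every
# browser_download_url asset link, one per line, for later mask filtering.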

# drop file path from URL
get_host_only()
{
    echo "$1/" | grep -Eo '(.*://[^/]+)'
}
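
# Example: get_host_only "https://example.com/pub/dir/file" prints
# "https://example.com" (everything after the host part is dropped).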

concatenate_url_and_filename()
{
    local url="$(echo "$1" | sed -e 's|/*$||' )"
    local fn="$(echo "$2" | sed -e 's|^/*||' )"
    echo "$url/$fn"
}
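
# Example: concatenate_url_and_filename "http://example.com/dir/" "/file.tar"
# prints "http://example.com/dir/file.tar" (duplicate slashes at the join are
# collapsed on both sides).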

# MADEURL is filled with the latest constructed URL, as a flag that it is the final form of the URL
MADEURL=''

# Args: URL filename
make_fileurl()
{
    local url="$1"
    local fn="$2"

    fn="$(echo "$fn" | sed -e 's|^./||' -e 's|^/*||')"

    if is_fileurl "$url" ; then
        # file URL: use it as is
        :
    elif is_rsyncurl "$url" || is_sshurl "$url" ; then
        # rsync/ssh URLs: just ensure trailing slash
        url="$(echo "$url" | sed 's|/*$|/|')"
    elif is_abs_path "$fn" ; then
        # the file path is given from the root of the site
        url="$(get_host_only "$url")"
    elif ! have_end_slash_or_php_parametr "$url" ; then
        url="$(dirname "$url")"
    fi

    MADEURL="$(concatenate_url_and_filename "$url" "$fn")"
    echo "$MADEURL"
}
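
# Examples (illustrative): make_fileurl "http://example.com/dir/na*.log" "na1.log"
# prints http://example.com/dir/na1.log (no trailing slash, so the mask part is
# dropped via dirname), while an absolute "/pub/na1.log" filename would be
# joined to the host only: http://example.com/pub/na1.log.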

get_urls()
{
    if is_fileurl "$URL" ; then
        ls -1 "$(path_from_url "$URL")"
        return
    fi

    # rsync directory listing
    if is_rsyncurl "$URL" || is_sshurl "$URL" ; then
        $RSYNC --list-only "$URL" 2>/dev/null | awk '{print $NF}'
        return
    fi

    local content
    content="$(scat "$URL")"

    # Detect format: HTML (contains <) or ls -l (FTP directory listing from curl)
    if echo "$content" | grep -q '<' ; then
        # HTML format (wget FTP, HTTP): parse href attributes
        echo "$content" | sed -e 's|<|<\n|g' -e 's|data-file=|href=|g' -e "s|href=http|href=\"http|g" -e "s|>|\">|g" -e "s|'|\"|g" | \
             grep -i -o -E 'href="(.+)"' | sed -e 's|&amp;|\&|' | cut -d'"' -f2 | sed -e 's|^ *||g' -e 's| *$||g'
    else
        # ls -l format (curl FTP): extract last field (filename)
        echo "$content" | awk '{print $NF}'
    fi
}

# Check that URL is provided (unless using -i|--input-file)
[ -z "$INPUTFILE" ] && [ -z "$1" ] && fatal "Error: URL is required"

if [ -n "$CHECKURL" ] ; then
    #set_quiet
    URL="$1"
    check_url_is_available "$URL"
    res=$?
    if [ -n "$verbose" ] ; then
        [ "$res" = "0" ] && echo "$URL is accessible via network and file exists" || echo "$URL is NOT accessible via network or file does not exist"
    fi
    exit $res
fi

if [ -n "$CHECKSITE" ] ; then
    #set_quiet
    URL="$1"
    check_url_is_accessible "$URL"
    res=$?
    if [ -n "$verbose" ] ; then
        [ "$res" = "0" ] && echo "$URL is accessible via network" || echo "$URL is NOT accessible via network"
    fi
    exit $res
fi

if [ -n "$GETRESPONSE" ] ; then
    url_get_response "$1"
    exit
fi


# separate part for github downloads
if echo "$1" | grep -q "^https://github.com/" && \
   echo "$1" | grep -q -v "/blob/" && echo "$1" | grep -q -v "/download/" && [ -n "$2" ] ; then
    MASK="$2"

    if [ -n "$LISTONLY" ] ; then
        get_github_urls "$1" | filter_glob "$MASK" | filter_order
        exit
    fi

    ERROR=0
    for fn in $(get_github_urls "$1" | filter_glob "$MASK" | filter_order) ; do
        MADEURL="$fn" # mark it is the end form of the URL
        sget "$fn" "$TARGETFILE" || ERROR=1
        [ -n "$TARGETFILE" ] && [ "$ERROR" = "0" ] && break
    done
    exit
fi

if is_ipfsurl "$1" ; then
    [ -n "$2" ] && fatal "too many args when ipfs://Qm... used: extra '$2' arg"
    sget "$1" "$TARGETFILE"
    exit
fi

SEPMASK=""
# if mask is the second arg
if [ -n "$2" ] ; then
    URL="$1"
    MASK="$2"
    SEPMASK="$2"
else
    if [ -n "$NOGLOB" ] || have_end_slash_or_php_parametr "$1" || is_rsyncurl "$1" || is_sshurl "$1" ; then
        URL="$1"
        MASK=""
    else
        # drop mask part
        URL="$(dirname "$1")/"
        # wildcards allowed only in the last part of path
        MASK=$(basename "$1")
    fi

fi

# Process input file if specified
if [ -n "$INPUTFILE" ] ; then
	# Set output dir to current dir if not specified
	[ -z "$USEOUTPUTDIR" ] && USEOUTPUTDIR="."

	# Read URLs from file
	URLS="$(read_urls_from_file "$INPUTFILE")"
	[ -z "$URLS" ] && fatal "Error: no valid URLs found in input file"

	# Process each line separately (each line = one file, possibly with mirrors)
	echo "$URLS" | while IFS= read -r line ; do
		[ -z "$line" ] && continue
		# Each line may contain multiple URLs (mirrors) separated by spaces
		sget_with_mirrors $line
	done
	exit
fi

# https://www.freeoffice.com/download.php?filename=freeoffice-2021-1062.x86_64.rpm
if [ -z "$NOGLOB" ] && echo "$URL" | grep -q -P "[*\[\]]" ; then
    fatal "Error: there are globbing symbol (*[]) in $URL. It is allowed only for mask part"
fi

if is_url "$MASK" ; then
    #[ -z "$USEOUTPUTDIR" ] && fatal "eget supports only one URL as argument by default, use --output-dir to download in parallel"
    [ -z "$USEOUTPUTDIR" ] && USEOUTPUTDIR="."
fi

# if more than one file or if --output-dir is used
if [ -n "$USEOUTPUTDIR" ] ; then
    if [ -n "$TIMESTAMPING" ] ; then
        fatal "Error: --timestamping is not supported with --output-dir (parallel downloads)"
    fi
    pget "$@"
    exit
else
    [ -n "$3" ] && fatal "too many args: extra '$3'. May be you need use quotes for arg with wildcards."
fi

if [ -n "$LISTONLY" ] ; then
    for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
        is_url "$fn" && echo "$fn" && continue
        make_fileurl "$URL" "$fn"
    done
    exit
fi

is_wildcard()
{
    echo "$1" | grep -q "[*?]" && return
    echo "$1" | grep -q "\]" && return
    echo "$1" | grep -q "\[" && return
}

# If there is no wildcard symbol like asterisk, just download
if [ -z "$SEPMASK" ] && ! is_wildcard "$MASK" || echo "$MASK" | grep -q "[?].*="; then
    sget "$1" "$TARGETFILE"
    exit
fi

ERROR=0
for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
    is_url "$fn" || fn="$(make_fileurl "$URL" "$fn" )" #"
    sget "$fn" "$TARGETFILE" || ERROR=1
    [ -n "$TARGETFILE" ] && [ "$ERROR" = "0" ] && break
done
exit $ERROR
