-
Notifications
You must be signed in to change notification settings - Fork 144
/
upload.bash
executable file
·754 lines (673 loc) · 37.2 KB
/
upload.bash
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
#!/usr/bin/env bash
# Upload a file to Google Drive
# shellcheck source=/dev/null
###################################################
# Print full usage/help text and exit successfully.
# Fixes over previous revision (help text only):
#   "Supress" -> "Suppress", "postive" -> "positive",
#   doubled "default=default=" -> "default=",
#   missing space after <optional_email_address>.
# Globals: GUPLOAD_INSTALLED_WITH (adds -u/-U entries when installed via script)
# Arguments: None
# Result: prints help to stdout, exit 0
###################################################
_usage() {
    printf "%b" "
The script can be used to upload file/directory to google drive.\n
Usage:\n ${0##*/} [options.. ] <filename> <foldername>\n
Foldername argument is optional. If not provided, the file will be uploaded to preconfigured google drive.\n
File name argument is optional if create directory option is used.\n
Options:\n
  -c | -C | --create-dir <foldername> - option to create directory. Will provide folder id. Can be used to provide input folder, see README.\n
  -r | --root-dir <google_folderid> or <google_folder_url> - google folder ID/URL to which the file/directory is going to upload.
      If you want to change the default value, then use this format, -r/--root-dir default=root_folder_id/root_folder_url\n
  -s | --skip-subdirs - Skip creation of sub folders and upload all files inside the INPUT folder/sub-folders in the INPUT folder, use this along with -p/--parallel option to speed up the uploads.\n
  -p | --parallel <no_of_files_to_parallely_upload> - Upload multiple files in parallel, Max value = 10.\n
  -f | --[file|folder] - Specify files and folders explicitly in one command, use multiple times for multiple folder/files. See README for more use of this command.\n
  -cl | --clone - Upload a gdrive file without downloading, require accessible gdrive link or id as argument.\n
  -o | --overwrite - Overwrite the files with the same name, if present in the root folder/input folder, also works with recursive folders.\n
  -d | --skip-duplicates - Do not upload the files with the same name, if already present in the root folder/input folder, also works with recursive folders.\n
  -S | --share <optional_email_address> - Share the uploaded input file/folder, grant reader permission to provided email address or to everyone with the shareable link.\n
  --speed 'speed' - Limit the download speed, supported formats: 1K, 1M and 1G.\n
  -i | --save-info <file_to_save_info> - Save uploaded files info to the given filename.\n
  -z | --config <config_path> - Override default config file with custom config file.\nIf you want to change default value, then use this format -z/--config default=your_config_file_path.\n
  -q | --quiet - Suppress the normal output, only show success/error upload messages for files, and one extra line at the beginning for folder showing no. of files and sub folders.\n
  -R | --retry 'num of retries' - Retry the file upload if it fails, positive integer as argument. Currently only for file uploads.\n
  -in | --include 'pattern' - Only include the files with the given pattern to upload - Applicable for folder uploads.\n
      e.g: ${0##*/} local_folder --include "*1*", will only include with files with pattern '1' in the name.\n
  -ex | --exclude 'pattern' - Exclude the files with the given pattern from uploading. - Applicable for folder uploads.\n
      e.g: ${0##*/} local_folder --exclude "*1*", will exclude all the files pattern '1' in the name.\n
  --hide - This flag will prevent the script to print sensitive information like root folder id or drivelink.\n
  -v | --verbose - Display detailed message (only for non-parallel uploads).\n
  -V | --verbose-progress - Display detailed message and detailed upload progress(only for non-parallel uploads).\n
  --skip-internet-check - Do not check for internet connection, recommended to use in sync jobs.
  $([[ ${GUPLOAD_INSTALLED_WITH} = script ]] && printf '%s\n' '\n -u | --update - Update the installed script in your system.\n
  -U | --uninstall - Uninstall script, remove related files.\n')
  --info - Show detailed info, only if script is installed system wide.\n
  -D | --debug - Display script command trace.\n
  -h | --help - Display this message.\n"
    exit 0
}
# Print a one-line hint pointing at -h/--help, then exit successfully.
# Used when the script is invoked with no usable arguments.
_short_help() {
    printf "%s\n" "No valid arguments provided, use -h/--help flag to see usage."
    exit 0
}
###################################################
# Print info if installed
# Globals: 7 variable
# COMMAND_NAME REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA
# Arguments: None
# Result: read description
###################################################
###################################################
# Print installation metadata (repo, install path, type, latest sha, config)
# when the script is installed system wide; otherwise report that it isn't.
# Always exits with status 0.
# Globals: COMMAND_NAME REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE
#          LATEST_INSTALLED_SHA CONFIG
###################################################
_version_info() {
    if command -v "${COMMAND_NAME}" 1> /dev/null && [[ -n "${REPO:+${COMMAND_NAME:+${INSTALL_PATH:+${TYPE:+${TYPE_VALUE}}}}}" ]]; then
        declare each_var
        for each_var in REPO INSTALL_PATH INSTALLATION TYPE TYPE_VALUE LATEST_INSTALLED_SHA CONFIG; do
            # emit NAME="value", then turn every '=' into ': ' like before
            printf "%s=\"%s\"\n" "${each_var}" "${!each_var}"
        done | sed -e "s/=/: /g"
    else
        printf "%s\n" "google-drive-upload is not installed system wide."
    fi
    exit 0
}
###################################################
# Function to cleanup config file
# Remove invalid access tokens on the basis of corresponding expiry
# Globals: None
# Arguments: 1
# ${1} = config file
# Result: read description
###################################################
###################################################
# Remove expired access tokens (and their expiry lines) from a config file.
# Fix: when the config contains no ACCESS_TOKEN_EXPIRY line, the herestring
# below still delivers a single empty line; "[[ '' -le now ]]" evaluates the
# empty string as 0 (true), so values_regex became '=".*"|=".*"' which
# matched EVERY KEY="value" line and wiped the whole config. Empty lines are
# now skipped.
# Globals: None
# Arguments: 1
#   ${1} = config file path (missing file is not an error)
# Result: config rewritten without expired token pairs, kept read-only
###################################################
_cleanup_config() {
    declare config="${1:?Error: Missing config}" values_regex line
    ! [ -f "${config}" ] && return 0
    while read -r line; do
        # skip the empty line produced when grep below matched nothing
        [[ -n ${line} ]] || continue
        expiry_value_name="${line%%=*}"
        token_value_name="${expiry_value_name%%_EXPIRY}"
        # strip KEY= prefix and surrounding quotes via $_ chaining
        : "${line##*=}" && : "${_%\"}" && expiry="${_#\"}"
        # expired (epoch <= now) -> queue both the expiry and token lines for removal
        [[ ${expiry} -le "$(printf "%(%s)T\\n" "-1")" ]] &&
            values_regex="${values_regex:+${values_regex}|}${expiry_value_name}=\".*\"|${token_value_name}=\".*\""
    done <<< "$(grep -F ACCESS_TOKEN_EXPIRY "${config}" || :)"
    # rewrite without blank lines and expired entries; config stays chmod -w between edits
    chmod +w "${config}" &&
        printf "%s\n" "$(grep -Ev "^\$${values_regex:+|${values_regex}}" "${config}")" >| "${config}" &&
        chmod -w "${config}"
    return 0
}
###################################################
# Process all arguments given to the script
# Globals: 2 variable, 1 function
# Variable - HOME, CONFIG
# Functions - _short_help
# Arguments: Many
# ${@} = Flags with argument and file/folder input
# Result: On
# Success - Set all the variables
# Error - Print error message and exit
# Reference:
# Email Regex - https://stackoverflow.com/a/57295993
###################################################
###################################################
# Process all arguments given to the script.
# Fixes over previous revision:
#   * -S/--share inspected ${1} (the flag itself, which always starts with
#     '-') instead of ${2}, so an optional email argument was never captured.
#   * '') arm called undefined "shorthelp"; now calls _short_help.
#   * uninstall arm now also accepts -U, as advertised in _usage.
# Globals: 2 variable, 1 function
#   Variable - HOME, CONFIG
#   Functions - _short_help
# Arguments: Many
#   ${@} = Flags with argument and file/folder input
# Result: On
#   Success - Set all the variables
#   Error   - Print error message and exit
# Reference:
#   Email Regex - https://stackoverflow.com/a/57295993
###################################################
_setup_arguments() {
    [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 1
    # Internal variables
    # De-initialize if any variables set already.
    unset FIRST_INPUT FOLDER_INPUT FOLDERNAME LOCAL_INPUT_ARRAY ID_INPUT_ARRAY
    unset PARALLEL NO_OF_PARALLEL_JOBS SHARE SHARE_EMAIL OVERWRITE SKIP_DUPLICATES SKIP_SUBDIRS ROOTDIR QUIET
    unset VERBOSE VERBOSE_PROGRESS DEBUG LOG_FILE_ID CURL_SPEED RETRY
    CURL_PROGRESS="-s" EXTRA_LOG=":" CURL_PROGRESS_EXTRA="-s"
    INFO_PATH="${HOME}/.google-drive-upload" CONFIG_INFO="${INFO_PATH}/google-drive-upload.configpath"
    [[ -f ${CONFIG_INFO} ]] && . "${CONFIG_INFO}"
    CONFIG="${CONFIG:-${HOME}/.googledrive.conf}"
    # Configuration variables # Remote gDrive variables
    unset ROOT_FOLDER CLIENT_ID CLIENT_SECRET REFRESH_TOKEN ACCESS_TOKEN
    API_URL="https://www.googleapis.com"
    API_VERSION="v3"
    SCOPE="${API_URL}/auth/drive"
    REDIRECT_URI="urn:ietf:wg:oauth:2.0:oob"
    TOKEN_URL="https://accounts.google.com/o/oauth2/token"
    # Validate a custom config path; "default=" prefix requests persisting it.
    _check_config() {
        [[ ${1} = default* ]] && export UPDATE_DEFAULT_CONFIG="_update_config"
        { [[ -r ${2} ]] && CONFIG="${2}"; } || {
            printf "Error: Given config file (%s) doesn't exist/not readable,..\n" "${1}" 1>&2 && exit 1
        }
        return 0
    }
    # Abort with a message when a flag that requires a value got none.
    _check_longoptions() {
        [[ -z ${2} ]] &&
            printf '%s: %s: option requires an argument\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" &&
            exit 1
        return 0
    }
    while [[ $# -gt 0 ]]; do
        case "${1}" in
            -h | --help) _usage ;;
            -D | --debug) DEBUG="true" && export DEBUG ;;
            --info) _version_info ;;
            -c | -C | --create-dir)
                _check_longoptions "${1}" "${2}"
                FOLDERNAME="${2}" && shift
                ;;
            -r | --root-dir)
                _check_longoptions "${1}" "${2}"
                ROOTDIR="${2/default=/}"
                [[ ${2} = default* ]] && UPDATE_DEFAULT_ROOTDIR="_update_config"
                shift
                ;;
            -z | --config)
                _check_longoptions "${1}" "${2}"
                _check_config "${2}" "${2/default=/}"
                shift
                ;;
            -i | --save-info)
                _check_longoptions "${1}" "${2}"
                LOG_FILE_ID="${2}" && shift
                ;;
            -s | --skip-subdirs) SKIP_SUBDIRS="true" ;;
            -p | --parallel)
                _check_longoptions "${1}" "${2}"
                NO_OF_PARALLEL_JOBS="${2}"
                if [[ ${2} -gt 0 ]]; then
                    # clamp to the supported maximum of 10 parallel jobs
                    NO_OF_PARALLEL_JOBS="$((NO_OF_PARALLEL_JOBS > 10 ? 10 : NO_OF_PARALLEL_JOBS))"
                else
                    printf "\nError: -p/--parallel value ranges between 1 to 10.\n"
                    exit 1
                fi
                PARALLEL_UPLOAD="parallel" && shift
                ;;
            -o | --overwrite) OVERWRITE="Overwrite" && UPLOAD_MODE="update" ;;
            -d | --skip-duplicates) SKIP_DUPLICATES="Skip Existing" && UPLOAD_MODE="update" ;;
            -f | --file | --folder)
                _check_longoptions "${1}" "${2}"
                LOCAL_INPUT_ARRAY+=("${2}") && shift
                ;;
            -cl | --clone)
                _check_longoptions "${1}" "${2}"
                FINAL_ID_INPUT_ARRAY+=("$(_extract_id "${2}")") && shift
                ;;
            -S | --share)
                SHARE="_share_id"
                EMAIL_REGEX="^([A-Za-z]+[A-Za-z0-9]*\+?((\.|\-|\_)?[A-Za-z]+[A-Za-z0-9]*)*)@(([A-Za-z0-9]+)+((\.|\-|\_)?([A-Za-z0-9]+)+)*)+\.([A-Za-z]{2,})+$"
                # Fix: look at the NEXT argument for the optional email address,
                # not at ${1} (the -S flag itself, which always starts with '-').
                [[ -n ${2} && ! ${2} = -* ]] && SHARE_EMAIL="${2}" && {
                    ! [[ ${SHARE_EMAIL} =~ ${EMAIL_REGEX} ]] && printf "\nError: Provided email address for share option is invalid.\n" && exit 1
                    shift
                }
                ;;
            --speed)
                _check_longoptions "${1}" "${2}"
                regex='^([0-9]+)([k,K]|[m,M]|[g,G])+$'
                if [[ ${2} =~ ${regex} ]]; then
                    CURL_SPEED="--limit-rate ${2}" && shift
                else
                    printf "Error: Wrong speed limit format, supported formats: 1K , 1M and 1G\n" 1>&2
                    exit 1
                fi
                ;;
            -R | --retry)
                _check_longoptions "${1}" "${2}"
                if [[ ${2} -gt 0 ]]; then
                    RETRY="${2}" && shift
                else
                    printf "Error: -R/--retry only takes positive integers as arguments, min = 1, max = infinity.\n"
                    exit 1
                fi
                ;;
            -in | --include)
                _check_longoptions "${1}" "${2}"
                INCLUDE_FILES="${INCLUDE_FILES} -name '${2}' " && shift
                ;;
            -ex | --exclude)
                _check_longoptions "${1}" "${2}"
                EXCLUDE_FILES="${EXCLUDE_FILES} ! -name '${2}' " && shift
                ;;
            --hide) HIDE_INFO=":" ;;
            -q | --quiet) QUIET="_print_center_quiet" ;;
            -v | --verbose) VERBOSE="true" ;;
            -V | --verbose-progress) VERBOSE_PROGRESS="true" ;;
            --skip-internet-check) SKIP_INTERNET_CHECK=":" ;;
            '') _short_help ;; # fix: was undefined "shorthelp"
            *) # Check if user meant it to be a flag
                if [[ ${1} = -* ]]; then
                    [[ ${GUPLOAD_INSTALLED_WITH} = script ]] && {
                        case "${1}" in
                            -u | --update)
                                _check_debug && _update && { exit 0 || exit 1; }
                                ;;
                            -U | --uninstall) # fix: -U short flag was missing
                                _check_debug && _update uninstall && { exit 0 || exit 1; }
                                ;;
                        esac
                    }
                    printf '%s: %s: Unknown option\nTry '"%s -h/--help"' for more information.\n' "${0##*/}" "${1}" "${0##*/}" && exit 1
                else
                    if [[ ${1} =~ (drive.google.com|docs.google.com) ]]; then
                        FINAL_ID_INPUT_ARRAY+=("$(_extract_id "${1}")")
                    else
                        # If no "-" is detected in 1st arg, it adds to input
                        LOCAL_INPUT_ARRAY+=("${1}")
                    fi
                fi
                ;;
        esac
        shift
    done
    _check_debug
    [[ -n ${VERBOSE_PROGRESS} ]] && unset VERBOSE && CURL_PROGRESS=""
    [[ -n ${QUIET} ]] && CURL_PROGRESS="-s"
    # Get foldername, prioritise the input given by -C/--create-dir option.
    FOLDERNAME="$(_extract_id "${FOLDERNAME:-${FOLDER_INPUT}}")"
    # de-duplicate local inputs and drop unreadable ones
    unset Aseen && declare -A Aseen
    for input in "${LOCAL_INPUT_ARRAY[@]}"; do
        { [[ ${Aseen[${input}]} ]] && continue; } || Aseen[${input}]=x
        { [[ -r ${input} ]] && FINAL_LOCAL_INPUT_ARRAY+=("${input}"); } || {
            { "${QUIET:-_print_center}" 'normal' "[ Error: Invalid Input - ${input} ]" "=" && printf "\n"; } 1>&2
            continue
        }
    done
    # If no input, then check if -C option was used or not.
    [[ -z ${FINAL_LOCAL_INPUT_ARRAY[*]:-${FINAL_ID_INPUT_ARRAY[*]:-${FOLDERNAME}}} ]] && _short_help
    # create info path folder, can be missing if gupload was not installed with install.sh
    mkdir -p "${INFO_PATH}"
    return 0
}
###################################################
# Check Oauth credentials and create/update config file
# Client ID, Client Secret, Refresh Token and Access Token
# Globals: 10 variables, 3 functions
# Variables - API_URL, API_VERSION, TOKEN URL,
# CONFIG, UPDATE_DEFAULT_CONFIG, INFO_PATH,
# CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN and ACCESS_TOKEN
# Functions - _update_config, _update_value, _json_value and _print_center
# Arguments: None
# Result: read description
###################################################
_check_credentials() {
    # Load existing credentials; config file is created automatically after first run
    [[ -r ${CONFIG} ]] && . "${CONFIG}"
    # Persist a new default config path if -z/--config default= was used
    # (UPDATE_DEFAULT_CONFIG is either "_update_config" or unset -> ':' no-op)
    "${UPDATE_DEFAULT_CONFIG:-:}" CONFIG "${CONFIG}" "${CONFIG_INFO}"
    # Without a terminal we cannot prompt, so all three values must already exist
    ! [[ -t 1 ]] && [[ -z ${CLIENT_ID:+${CLIENT_SECRET:+${REFRESH_TOKEN}}} ]] && {
        printf "%s\n" "Error: Script is not running in a terminal, cannot ask for credentials."
        printf "%s\n" "Add in config manually if terminal is not accessible. CLIENT_ID, CLIENT_SECRET and REFRESH_TOKEN is required." && return 1
    }
    # Following https://developers.google.com/identity/protocols/oauth2#size
    CLIENT_ID_REGEX='[0-9]+-[0-9A-Za-z_]{32}\.apps\.googleusercontent\.com'
    CLIENT_SECRET_REGEX='[0-9A-Za-z_-]+'
    REFRESH_TOKEN_REGEX='[0-9]//[0-9A-Za-z_-]+' # 512 bytes
    ACCESS_TOKEN_REGEX='ya29\.[0-9A-Za-z_-]+' # 2048 bytes
    AUTHORIZATION_CODE_REGEX='[0-9]/[0-9A-Za-z_-]+' # 256 bytes
    # Prompt (and re-prompt) until a syntactically valid client id is available.
    # lowercase "client_id" marks that the value came from the prompt, so it is
    # written back to the config only then.
    until [[ -n ${CLIENT_ID} && -n ${CLIENT_ID_VALID} ]]; do
        [[ -n ${CLIENT_ID} ]] && {
            if [[ ${CLIENT_ID} =~ ${CLIENT_ID_REGEX} ]]; then
                [[ -n ${client_id} ]] && _update_config CLIENT_ID "${CLIENT_ID}" "${CONFIG}"
                CLIENT_ID_VALID="true" && continue
            else
                { [[ -n ${client_id} ]] && message="- Try again"; } || message="in config ( ${CONFIG} )"
                "${QUIET:-_print_center}" "normal" " Invalid Client ID ${message} " "-" && unset CLIENT_ID client_id
            fi
        }
        [[ -z ${client_id} ]] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter Client ID " "-"
        [[ -n ${client_id} ]] && _clear_line 1
        printf -- "-> "
        read -r CLIENT_ID && client_id=1
    done
    # Same prompt/validate loop for the client secret
    until [[ -n ${CLIENT_SECRET} && -n ${CLIENT_SECRET_VALID} ]]; do
        [[ -n ${CLIENT_SECRET} ]] && {
            if [[ ${CLIENT_SECRET} =~ ${CLIENT_SECRET_REGEX} ]]; then
                [[ -n ${client_secret} ]] && _update_config CLIENT_SECRET "${CLIENT_SECRET}" "${CONFIG}"
                CLIENT_SECRET_VALID="true" && continue
            else
                { [[ -n ${client_secret} ]] && message="- Try again"; } || message="in config ( ${CONFIG} )"
                "${QUIET:-_print_center}" "normal" " Invalid Client Secret ${message} " "-" && unset CLIENT_SECRET client_secret
            fi
        }
        [[ -z ${client_secret} ]] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter Client Secret " "-"
        [[ -n ${client_secret} ]] && _clear_line 1
        printf -- "-> "
        read -r CLIENT_SECRET && client_secret=1
    done
    # Discard a malformed refresh token from the config so we re-prompt below
    [[ -n ${REFRESH_TOKEN} ]] && {
        ! [[ ${REFRESH_TOKEN} =~ ${REFRESH_TOKEN_REGEX} ]] &&
            "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token in config file, follow below steps.. " "-" && unset REFRESH_TOKEN
    }
    [[ -z ${REFRESH_TOKEN} ]] && {
        # Offer to accept an already-generated refresh token first
        printf "\n" && "${QUIET:-_print_center}" "normal" "If you have a refresh token generated, then type the token, else leave blank and press return key.." " "
        printf "\n" && "${QUIET:-_print_center}" "normal" " Refresh Token " "-" && printf -- "-> "
        read -r REFRESH_TOKEN
        if [[ -n ${REFRESH_TOKEN} ]]; then
            "${QUIET:-_print_center}" "normal" " Checking refresh token.. " "-"
            if [[ ${REFRESH_TOKEN} =~ ${REFRESH_TOKEN_REGEX} ]]; then
                # verify the token actually works by fetching an access token
                { _get_access_token_and_update && _update_config REFRESH_TOKEN "${REFRESH_TOKEN}" "${CONFIG}"; } || check_error=true
            else
                check_error=true
            fi
            [[ -n ${check_error} ]] && "${QUIET:-_print_center}" "normal" " Error: Invalid Refresh token given, follow below steps to generate.. " "-" && unset REFRESH_TOKEN
        else
            "${QUIET:-_print_center}" "normal" " No Refresh token given, follow below steps to generate.. " "-"
        fi
        # Full OAuth installed-app flow: user visits consent URL, pastes code back
        [[ -z ${REFRESH_TOKEN} ]] && {
            printf "\n" && "${QUIET:-_print_center}" "normal" "Visit the below URL, tap on allow and then enter the code obtained" " "
            URL="https://accounts.google.com/o/oauth2/auth?client_id=${CLIENT_ID}&redirect_uri=${REDIRECT_URI}&scope=${SCOPE}&response_type=code&prompt=consent"
            printf "\n%s\n" "${URL}"
            until [[ -n ${AUTHORIZATION_CODE} && -n ${AUTHORIZATION_CODE_VALID} ]]; do
                [[ -n ${AUTHORIZATION_CODE} ]] && {
                    if [[ ${AUTHORIZATION_CODE} =~ ${AUTHORIZATION_CODE_REGEX} ]]; then
                        AUTHORIZATION_CODE_VALID="true" && continue
                    else
                        "${QUIET:-_print_center}" "normal" " Invalid CODE given, try again.. " "-" && unset AUTHORIZATION_CODE authorization_code
                    fi
                }
                { [[ -z ${authorization_code} ]] && printf "\n" && "${QUIET:-_print_center}" "normal" " Enter the authorization code " "-"; } || _clear_line 1
                printf -- "-> "
                read -r AUTHORIZATION_CODE && authorization_code=1
            done
            # Exchange the authorization code for refresh + access tokens
            RESPONSE="$(curl --compressed "${CURL_PROGRESS}" -X POST \
                --data "code=${AUTHORIZATION_CODE}&client_id=${CLIENT_ID}&client_secret=${CLIENT_SECRET}&redirect_uri=${REDIRECT_URI}&grant_type=authorization_code" "${TOKEN_URL}")" || :
            _clear_line 1 1>&2
            REFRESH_TOKEN="$(_json_value refresh_token 1 1 <<< "${RESPONSE}" || :)"
            { _get_access_token_and_update "${RESPONSE}" && _update_config REFRESH_TOKEN "${REFRESH_TOKEN}" "${CONFIG}"; } || return 1
        }
        printf "\n"
    }
    # Refresh the access token when it is missing, expired or malformed.
    # Note list grouping: ( missing || expired ) && refresh, OR'd with the
    # regex check - i.e. any failing condition triggers the refresh.
    [[ -z ${ACCESS_TOKEN} || ${ACCESS_TOKEN_EXPIRY:-0} -lt "$(printf "%(%s)T\\n" "-1")" ]] || ! [[ ${ACCESS_TOKEN} =~ ${ACCESS_TOKEN_REGEX} ]] &&
        { _get_access_token_and_update || return 1; }
    # Share the current token with the background refresher via a tmp file
    printf "%b\n" "ACCESS_TOKEN=\"${ACCESS_TOKEN}\"\nACCESS_TOKEN_EXPIRY=\"${ACCESS_TOKEN_EXPIRY}\"" >| "${TMPFILE}_ACCESS_TOKEN"
    # launch a background service to check access token and update it
    # checks ACCESS_TOKEN_EXPIRY, try to update before 5 mins of expiry, a fresh token gets 60 mins
    # process will be killed when script exits or "${MAIN_PID}" is killed
    {
        until ! kill -0 "${MAIN_PID}" 2>| /dev/null 1>&2; do
            . "${TMPFILE}_ACCESS_TOKEN"
            CURRENT_TIME="$(printf "%(%s)T\\n" "-1")"
            REMAINING_TOKEN_TIME="$((ACCESS_TOKEN_EXPIRY - CURRENT_TIME))"
            if [[ ${REMAINING_TOKEN_TIME} -le 300 ]]; then
                # timeout after 30 seconds, it shouldn't take too long anyway, and update tmp config
                CONFIG="${TMPFILE}_ACCESS_TOKEN" _timeout 30 _get_access_token_and_update || :
            else
                # sleep until 5 minutes before expiry, then re-check
                TOKEN_PROCESS_TIME_TO_SLEEP="$(if [[ ${REMAINING_TOKEN_TIME} -le 301 ]]; then
                    printf "0\n"
                else
                    printf "%s\n" "$((REMAINING_TOKEN_TIME - 300))"
                fi)"
                sleep "${TOKEN_PROCESS_TIME_TO_SLEEP}"
            fi
            sleep 1
        done
    } &
    ACCESS_TOKEN_SERVICE_PID="${!}"
    return 0
}
###################################################
# Setup root directory where all file/folders will be uploaded/updated
# Globals: 5 variables, 5 functions
# Variables - ROOTDIR, ROOT_FOLDER, UPDATE_DEFAULT_ROOTDIR, CONFIG, QUIET
# Functions - _print_center, _drive_info, _extract_id, _update_config, _json_value
# Arguments: None
# Result: read description
# If root id not found then print message and exit
# Update config with root id and root id name if specified
# Reference:
# https://github.com/dylanaraps/pure-bash-bible#use-read-as-an-alternative-to-the-sleep-command
###################################################
_setup_root_dir() {
    # Resolve ROOT_FOLDER to a verified folder id. Both helpers accept an
    # optional updater function name in ${1} (e.g. _update_config); when
    # absent, ':' makes the persistence step a no-op.
    _check_root_id() {
        declare json rootid
        json="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "id")"
        if ! rootid="$(_json_value id 1 1 <<< "${json}")"; then
            # No "id" in the response: report invalid id/url, else dump raw json
            { [[ ${json} =~ "File not found" ]] && "${QUIET:-_print_center}" "justify" "Given root folder" " ID/URL invalid." "=" 1>&2; } || {
                printf "%s\n" "${json}" 1>&2
            }
            return 1
        fi
        ROOT_FOLDER="${rootid}"
        "${1:-:}" ROOT_FOLDER "${ROOT_FOLDER}" "${CONFIG}"
        return 0
    }
    # Fetch the root folder's display name (best effort - errors are ignored)
    _check_root_id_name() {
        ROOT_FOLDER_NAME="$(_drive_info "$(_extract_id "${ROOT_FOLDER}")" "name" | _json_value name || :)"
        "${1:-:}" ROOT_FOLDER_NAME "${ROOT_FOLDER_NAME}" "${CONFIG}"
        return 0
    }
    if [[ -n ${ROOTDIR:-} ]]; then
        # -r/--root-dir from the command line takes priority over the config
        ROOT_FOLDER="${ROOTDIR}" && { _check_root_id "${UPDATE_DEFAULT_ROOTDIR}" || return 1; } && unset ROOT_FOLDER_NAME
    elif [[ -z ${ROOT_FOLDER} ]]; then
        # First run: prompt if interactive; empty answer or no tty falls back to "root"
        { [[ -t 1 ]] && "${QUIET:-_print_center}" "normal" "Enter root folder ID or URL, press enter for default ( root )" " " && printf -- "-> " &&
            read -r ROOT_FOLDER && [[ -n ${ROOT_FOLDER} ]] && { _check_root_id _update_config || return 1; }; } || {
            ROOT_FOLDER="root"
            _update_config ROOT_FOLDER "${ROOT_FOLDER}" "${CONFIG}"
        }
    fi
    # Cache the root folder's name when not already known
    [[ -z ${ROOT_FOLDER_NAME} ]] && _check_root_id_name "${UPDATE_DEFAULT_ROOTDIR}"
    return 0
}
###################################################
# Setup Workspace folder
# Check if the given folder exists in google drive.
# If not then the folder is created in google drive under the configured root folder.
# Globals: 2 variables, 3 functions
# Variables - FOLDERNAME, ROOT_FOLDER
# Functions - _create_directory, _drive_info, _json_value
# Arguments: None
# Result: Read Description
###################################################
###################################################
# Decide where uploads land: a folder created/found under the root folder
# when -C/--create-dir was given, otherwise the configured root folder.
# Globals: FOLDERNAME, ROOT_FOLDER, ROOT_FOLDER_NAME (read);
#          WORKSPACE_FOLDER_ID, WORKSPACE_FOLDER_NAME (written)
# Arguments: None
# Result: 0 on success; 1 (with the API error on stderr) on failure
###################################################
_setup_workspace() {
    if [[ -n ${FOLDERNAME} ]]; then
        WORKSPACE_FOLDER_ID="$(_create_directory "${FOLDERNAME}" "${ROOT_FOLDER}")" ||
            { printf "%s\n" "${WORKSPACE_FOLDER_ID}" 1>&2 && return 1; }
        WORKSPACE_FOLDER_NAME="$(_drive_info "${WORKSPACE_FOLDER_ID}" name | _json_value name 1 1)" ||
            { printf "%s\n" "${WORKSPACE_FOLDER_NAME}" 1>&2 && return 1; }
    else
        WORKSPACE_FOLDER_ID="${ROOT_FOLDER}"
        WORKSPACE_FOLDER_NAME="${ROOT_FOLDER_NAME}"
    fi
    return 0
}
###################################################
# Process all the values in "${FINAL_LOCAL_INPUT_ARRAY[@]}" & "${FINAL_ID_INPUT_ARRAY[@]}"
# Globals: 22 variables, 17 functions
# Variables - FINAL_LOCAL_INPUT_ARRAY ( array ), ACCESS_TOKEN, VERBOSE, VERBOSE_PROGRESS
# WORKSPACE_FOLDER_ID, UPLOAD_MODE, SKIP_DUPLICATES, OVERWRITE, SHARE,
# UPLOAD_STATUS, COLUMNS, API_URL, API_VERSION, TOKEN_URL, LOG_FILE_ID
# FILE_ID, FILE_LINK, FINAL_ID_INPUT_ARRAY ( array )
# PARALLEL_UPLOAD, QUIET, NO_OF_PARALLEL_JOBS, TMPFILE
# Functions - _print_center, _clear_line, _newline, _support_ansi_escapes, _print_center_quiet
# _upload_file, _share_id, _is_terminal, _dirname,
# _create_directory, _json_value, _url_encode, _check_existing_file, _bytes_to_human
# _clone_file, _get_access_token_and_update, _get_rootdir_id
# Arguments: None
# Result: Upload/Clone all the input files/folders, if a folder is empty, print Error message.
###################################################
###################################################
# Upload/clone every entry of FINAL_LOCAL_INPUT_ARRAY and FINAL_ID_INPUT_ARRAY.
# Fix over previous revision: the "invalid file id" message interpolated the
# literal text "gdrive_id" instead of the variable's value
# ( ${HIDE_INFO:-gdrive_id} -> ${HIDE_INFO:-${gdrive_id}} ).
# Globals: many (see exports below); Functions: exported helpers below
# Arguments: None
# Result: Upload/Clone all the input files/folders, if a folder is empty, print Error message.
###################################################
_process_arguments() {
    # export everything the (possibly parallel, subshell-run) helpers need
    export API_URL API_VERSION TOKEN_URL ACCESS_TOKEN \
        LOG_FILE_ID OVERWRITE UPLOAD_MODE SKIP_DUPLICATES CURL_SPEED RETRY UTILS_FOLDER TMPFILE \
        QUIET VERBOSE VERBOSE_PROGRESS CURL_PROGRESS CURL_PROGRESS_EXTRA CURL_PROGRESS_EXTRA_CLEAR COLUMNS EXTRA_LOG PARALLEL_UPLOAD
    export -f _bytes_to_human _dirname _json_value _url_encode _support_ansi_escapes _newline _print_center_quiet _print_center _clear_line \
        _api_request _get_access_token_and_update _check_existing_file _upload_file _upload_file_main _clone_file _collect_file_info _generate_upload_link _upload_file_from_uri _full_upload \
        _normal_logging_upload _error_logging_upload _log_upload_session _remove_upload_session _upload_folder _share_id _get_rootdir_id
    # on successful uploads: optionally share, then print the drive link
    _share_and_print_link() {
        "${SHARE:-:}" "${1:-}" "${SHARE_EMAIL}"
        [[ -z ${HIDE_INFO} ]] && {
            _print_center "justify" "DriveLink" "${SHARE:+ (SHARED)}" "-"
            _support_ansi_escapes && [[ ${COLUMNS} -gt 45 ]] && _print_center "normal" "↓ ↓ ↓" ' '
            _print_center "normal" "https://drive.google.com/open?id=${1:-}" " "
        }
        return 0
    }
    for input in "${FINAL_LOCAL_INPUT_ARRAY[@]}"; do
        # Check if the argument is a file or a directory.
        if [[ -f ${input} ]]; then
            _print_center "justify" "Given Input" ": FILE" "="
            _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
            _upload_file_main noparse "${input}" "${WORKSPACE_FOLDER_ID}"
            if [[ ${RETURN_STATUS} = 1 ]]; then
                _share_and_print_link "${FILE_ID}"
                printf "\n"
            else
                for _ in 1 2; do _clear_line 1; done && continue
            fi
        elif [[ -d ${input} ]]; then
            input="$(cd "${input}" && pwd)" # to handle _dirname when current directory (.) is given as input.
            unset EMPTY # Used when input folder is empty
            _print_center "justify" "Given Input" ": FOLDER" "-"
            _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
            FOLDER_NAME="${input##*/}" && "${EXTRA_LOG}" "justify" "Folder: ${FOLDER_NAME}" "="
            NEXTROOTDIRID="${WORKSPACE_FOLDER_ID}"
            "${EXTRA_LOG}" "justify" "Processing folder.." "-"
            [[ -z ${SKIP_SUBDIRS} ]] && "${EXTRA_LOG}" "justify" "Indexing subfolders.." "-"
            # Do not create empty folders during a recursive upload. Use of find in this section is important.
            mapfile -t DIRNAMES <<< "$(find "${input}" -type d -not -empty)"
            NO_OF_FOLDERS="${#DIRNAMES[@]}" && NO_OF_SUB_FOLDERS="$((NO_OF_FOLDERS - 1))"
            [[ -z ${SKIP_SUBDIRS} ]] && _clear_line 1
            [[ ${NO_OF_SUB_FOLDERS} = 0 ]] && SKIP_SUBDIRS="true"
            "${EXTRA_LOG}" "justify" "Indexing files.." "-"
            # NOTE: eval is required because INCLUDE_FILES/EXCLUDE_FILES hold
            # pre-quoted find(1) predicates built in _setup_arguments.
            mapfile -t FILENAMES <<< "$(_tmp='find "'${input}'" -type f -name "*" '${INCLUDE_FILES}' '${EXCLUDE_FILES}'' && eval "${_tmp}")"
            _clear_line 1
            ERROR_STATUS=0 SUCCESS_STATUS=0
            # Skip the sub folders and find recursively all the files and upload them.
            if [[ -n ${SKIP_SUBDIRS} ]]; then
                if [[ -n ${FILENAMES[0]} ]]; then
                    for _ in 1 2; do _clear_line 1; done
                    NO_OF_FILES="${#FILENAMES[@]}"
                    "${QUIET:-_print_center}" "justify" "Folder: ${FOLDER_NAME} " "| ${NO_OF_FILES} File(s)" "=" && printf "\n"
                    "${EXTRA_LOG}" "justify" "Creating folder.." "-"
                    { ID="$(_create_directory "${input}" "${NEXTROOTDIRID}")" && export ID; } ||
                        { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
                    _clear_line 1 && DIRIDS="${ID}"
                    [[ -z ${PARALLEL_UPLOAD:-${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n"
                    _upload_folder "${PARALLEL_UPLOAD:-normal}" noparse "$(printf "%s\n" "${FILENAMES[@]}")" "${ID}"
                    [[ -n ${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n\n"
                else
                    for _ in 1 2; do _clear_line 1; done && EMPTY=1
                fi
            else
                if [[ -n ${FILENAMES[0]} ]]; then
                    for _ in 1 2; do _clear_line 1; done
                    NO_OF_FILES="${#FILENAMES[@]}"
                    "${QUIET:-_print_center}" "justify" "${FOLDER_NAME} " "| ${NO_OF_FILES} File(s) | ${NO_OF_SUB_FOLDERS} Sub-folders" "="
                    _newline "\n" && "${EXTRA_LOG}" "justify" "Creating Folder(s).." "-" && _newline "\n"
                    unset status DIRIDS
                    # Recreate the local folder tree remotely; DIRIDS maps
                    # "id|:_//_:|path|:_//_:|" lines for later parent lookups.
                    for dir in "${DIRNAMES[@]}"; do
                        [[ -n ${status} ]] && __dir="$(_dirname "${dir}")" &&
                            __temp="$(printf "%s\n" "${DIRIDS}" | grep -F "|:_//_:|${__dir}|:_//_:|")" &&
                            NEXTROOTDIRID="${__temp%%"|:_//_:|${__dir}|:_//_:|"}"
                        NEWDIR="${dir##*/}" && _print_center "justify" "Name: ${NEWDIR}" "-" 1>&2
                        ID="$(_create_directory "${NEWDIR}" "${NEXTROOTDIRID}")" ||
                            { "${QUIET:-_print_center}" "normal" "Folder creation failed" "-" && printf "%s\n\n\n" "${ID}" 1>&2 && continue; }
                        # Store sub-folder directory IDs and it's path for later use.
                        DIRIDS+="${ID}|:_//_:|${dir}|:_//_:|"$'\n'
                        for _ in 1 2; do _clear_line 1 1>&2; done
                        "${EXTRA_LOG}" "justify" "Status" ": $((status += 1)) / ${NO_OF_FOLDERS}" "=" 1>&2
                    done && export DIRIDS
                    _clear_line 1
                    _upload_folder "${PARALLEL_UPLOAD:-normal}" parse "$(printf "%s\n" "${FILENAMES[@]}")"
                    [[ -n ${PARALLEL_UPLOAD:+${VERBOSE:-${VERBOSE_PROGRESS}}} ]] && _newline "\n\n"
                else
                    for _ in 1 2 3; do _clear_line 1; done && EMPTY=1
                fi
            fi
            if [[ ${EMPTY} != 1 ]]; then
                [[ -z ${VERBOSE:-${VERBOSE_PROGRESS}} ]] && for _ in 1 2; do _clear_line 1; done
                # first DIRIDS line holds the top-level folder's id
                [[ ${SUCCESS_STATUS} -gt 0 ]] &&
                    FOLDER_ID="$(: "${DIRIDS%%$'\n'*}" && printf "%s\n" "${_/"|:_//_:|"*/}")" &&
                    _share_and_print_link "${FOLDER_ID}"
                _newline "\n"
                [[ ${SUCCESS_STATUS} -gt 0 ]] && "${QUIET:-_print_center}" "justify" "Total Files " "Uploaded: ${SUCCESS_STATUS}" "="
                [[ ${ERROR_STATUS} -gt 0 ]] && "${QUIET:-_print_center}" "justify" "Total Files " "Failed: ${ERROR_STATUS}" "="
                printf "\n"
            else
                for _ in 1 2 3; do _clear_line 1; done
                "${QUIET:-_print_center}" 'justify' "Empty Folder" ": ${FOLDER_NAME}" "=" 1>&2
                printf "\n"
            fi
        fi
    done
    # Clone accessible gdrive ids/links without downloading (de-duplicated)
    unset Aseen && declare -A Aseen
    for gdrive_id in "${FINAL_ID_INPUT_ARRAY[@]}"; do
        { [[ ${Aseen[${gdrive_id}]} ]] && continue; } || Aseen[${gdrive_id}]=x
        _print_center "justify" "Given Input" ": ID" "="
        "${EXTRA_LOG}" "justify" "Checking if id exists.." "-"
        json="$(_drive_info "${gdrive_id}" "name,mimeType,size" || :)"
        if ! _json_value code 1 1 <<< "${json}" 2>| /dev/null 1>&2; then
            type="$(_json_value mimeType 1 1 <<< "${json}" || :)"
            name="$(_json_value name 1 1 <<< "${json}" || :)"
            size="$(_json_value size 1 1 <<< "${json}" || :)"
            for _ in 1 2; do _clear_line 1; done
            if [[ ${type} =~ folder ]]; then
                "${QUIET:-_print_center}" "justify" "Folder not supported." "=" 1>&2 && _newline "\n" 1>&2 && continue
                ## TODO: Add support to clone folders
            else
                _print_center "justify" "Given Input" ": File ID" "="
                _print_center "justify" "Upload Method" ": ${SKIP_DUPLICATES:-${OVERWRITE:-Create}}" "=" && _newline "\n"
                _clone_file "${UPLOAD_MODE:-create}" "${gdrive_id}" "${WORKSPACE_FOLDER_ID}" "${name}" "${size}" ||
                    { for _ in 1 2; do _clear_line 1; done && continue; }
            fi
            _share_and_print_link "${FILE_ID}"
            printf "\n"
        else
            _clear_line 1
            # fix: interpolate the actual id value (unless --hide is active)
            "${QUIET:-_print_center}" "justify" "File ID (${HIDE_INFO:-${gdrive_id}})" " invalid." "=" 1>&2
            printf "\n"
        fi
    done
    return 0
}
main() {
    [[ $# = 0 ]] && _short_help
    # Source helper libraries unless a build step inlined them (SELF_SOURCE)
    [[ -z ${SELF_SOURCE} ]] && {
        UTILS_FOLDER="${UTILS_FOLDER:-${PWD}}"
        { . "${UTILS_FOLDER}"/common-utils.bash && . "${UTILS_FOLDER}"/drive-utils.bash && . "${UTILS_FOLDER}"/upload-utils.bash; } ||
            { printf "Error: Unable to source util files.\n" && exit 1; }
    }
    _check_bash_version && set -o errexit -o noclobber -o pipefail
    _setup_arguments "${@}"
    "${SKIP_INTERNET_CHECK:-_check_internet}"
    # TMPFILE: base path for scratch files; fallback derives a pseudo-unique
    # name from the squared epoch when mktemp is unavailable
    { command -v mktemp 1>| /dev/null && TMPFILE="$(mktemp -u)"; } || TMPFILE="${PWD}/.$(_t="$(printf "%(%s)T\\n" "-1")" && printf "%s\n" "$((_t * _t))").LOG"
    _cleanup() {
        # unhide the cursor if hidden
        [[ -n ${SUPPORT_ANSI_ESCAPES} ]] && printf "\e[?25h"
        {
            [[ -f ${TMPFILE}_ACCESS_TOKEN ]] && {
                # update the config with latest ACCESS_TOKEN and ACCESS_TOKEN_EXPIRY only if changed
                # NOTE(review): INITIAL_ACCESS_TOKEN is not set anywhere in this
                # file - presumably set by the sourced util files; verify.
                . "${TMPFILE}_ACCESS_TOKEN"
                [[ ${INITIAL_ACCESS_TOKEN} = "${ACCESS_TOKEN}" ]] || {
                    _update_config ACCESS_TOKEN "${ACCESS_TOKEN}" "${CONFIG}"
                    _update_config ACCESS_TOKEN_EXPIRY "${ACCESS_TOKEN_EXPIRY}" "${CONFIG}"
                }
            } 1>| /dev/null
            # grab all children processes of access token service
            # https://askubuntu.com/a/512872
            # NOTE(review): "ps --ppid" is procps (GNU/Linux) syntax; not
            # portable to BSD/macOS ps - confirm target platforms.
            [[ -n ${ACCESS_TOKEN_SERVICE_PID} ]] && {
                token_service_pids="$(ps --ppid="${ACCESS_TOKEN_SERVICE_PID}" -o pid=)"
                # first kill parent id, then children processes
                kill "${ACCESS_TOKEN_SERVICE_PID}"
            } 1>| /dev/null
            # grab all script children pids
            script_children_pids="$(ps --ppid="${MAIN_PID}" -o pid=)"
            # kill all grabbed children processes
            # shellcheck disable=SC2086
            kill ${token_service_pids} ${script_children_pids} 1>| /dev/null
            rm -f "${TMPFILE:?}"*
            export abnormal_exit && if [[ -n ${abnormal_exit} ]]; then
                printf "\n\n%s\n" "Script exited manually."
                kill -- -$$ &
            else
                # background best-effort: prune expired tokens, maybe self-update
                { _cleanup_config "${CONFIG}" && [[ ${GUPLOAD_INSTALLED_WITH} = script ]] && _auto_update; } 1>| /dev/null &
            fi
        } 2>| /dev/null || :
        return 0
    }
    trap 'abnormal_exit="1"; exit' INT TERM
    trap '_cleanup' EXIT
    trap '' TSTP # ignore ctrl + z
    export MAIN_PID="$$"
    START="$(printf "%(%s)T\\n" "-1")"
    "${EXTRA_LOG}" "justify" "Starting script" "-"
    "${EXTRA_LOG}" "justify" "Checking credentials.." "-"
    { _check_credentials && for _ in 1 2; do _clear_line 1; done; } ||
        { "${QUIET:-_print_center}" "normal" "[ Error: Credentials checking failed ]" "=" && exit 1; }
    _print_center "justify" "Required credentials available." "="
    "${EXTRA_LOG}" "justify" "Checking root dir and workspace folder.." "-"
    { _setup_root_dir && for _ in 1 2; do _clear_line 1; done; } ||
        { "${QUIET:-_print_center}" "normal" "[ Error: Rootdir setup failed ]" "=" && exit 1; }
    _print_center "justify" "Root dir properly configured." "="
    "${EXTRA_LOG}" "justify" "Checking Workspace Folder.." "-"
    { _setup_workspace && for _ in 1 2; do _clear_line 1; done; } ||
        { "${QUIET:-_print_center}" "normal" "[ Error: Workspace setup failed ]" "=" && exit 1; }
    _print_center "justify" "Workspace Folder: ${WORKSPACE_FOLDER_NAME}" "="
    _print_center "normal" " ${WORKSPACE_FOLDER_ID} " "-" && _newline "\n"
    # hide the cursor if ansi escapes are supported
    [[ -n ${SUPPORT_ANSI_ESCAPES} ]] && printf "\e[?25l"
    _process_arguments
    END="$(printf "%(%s)T\\n" "-1")"
    DIFF="$((END - START))"
    "${QUIET:-_print_center}" "normal" " Time Elapsed: ""$((DIFF / 60))"" minute(s) and ""$((DIFF % 60))"" seconds " "="
}
# Run main unless this file is being sourced (SOURCED_GUPLOAD set by the
# sourcing wrapper); trailing '|| :' keeps the caller's exit status clean.
{ [[ -z ${SOURCED_GUPLOAD} ]] && main "${@}"; } || :