1
0
mirror of https://github.com/deajan/obackup.git synced 2025-05-10 20:23:41 +02:00

Compare commits

..

237 Commits

Author SHA1 Message Date
deajan
8ccf304a20 Fixed remote du 2020-05-04 12:21:02 +01:00
deajan
e4b80c85cc Update ofunctions 2020-05-04 12:09:27 +01:00
deajan
b9cf079f15 Do not double quote twice 2020-05-04 11:58:15 +01:00
deajan
19043ade8c GPG 2.1.11 pinentry hell 2020-05-04 11:44:02 +01:00
deajan
a47771964c On the gpg 2.1.11 fix route 2020-05-04 11:35:16 +01:00
deajan
ac7f0376f5 Another try for gpg 2.1.11 fix 2020-05-04 11:30:36 +01:00
deajan
deb28458b1 Add fix for GPG 2.1.11 2020-05-04 11:22:34 +01:00
deajan
ff94a0cbbc Enable --pinentry-mode for GPG tests 2020-05-04 11:15:56 +01:00
deajan
3d97324366 Do not double quote dirList 2020-05-04 11:08:08 +01:00
deajan
9a43145dc1 Simple quote FILE_SIZE_LIST contents 2020-05-04 10:57:44 +01:00
deajan
3dc0fb8aec Simple quote FILE_SIZE_LIST contentsé 2020-05-04 10:57:27 +01:00
deajan
b963e67016 Quote all files 2020-05-03 09:14:59 +01:00
deajan
b5a951fcce Rebuilt targets 2020-05-03 09:08:02 +01:00
deajan
20946f6f24 Fix possible issue with paranthesis in source/dest dirnames 2020-05-03 09:06:57 +01:00
deajan
8210fe9e7f Make sure all files are quoted 2020-05-03 09:05:27 +01:00
deajan
7c51784aa1 Update ofunctions from osync project 2020-05-03 09:05:20 +01:00
root
d36386ee61 Rebuilt targets 2020-03-15 23:23:24 +01:00
root
823c6034db Fixed double sql backup on errro 2020-03-15 23:23:05 +01:00
root
30ea816a71 Rebuilt targets 2020-03-15 22:58:12 +01:00
root
01bb628b23 Fix didn't rename include footer for GenericTrapQuit 2020-03-15 22:57:46 +01:00
root
cef75e3e15 Fix didn't rename include footer for GenericTrapQuit 2020-03-15 22:56:47 +01:00
root
56013a3cac Renamed ofunctions TrapQuit to GeneralTrapQuit 2020-03-15 22:54:05 +01:00
deajan
5ab76ce5c9 Added upgrade finished message 2019-10-21 13:59:31 +02:00
deajan
bfb3e7c906 Updated changelog 2019-08-09 10:34:17 +02:00
deajan
003aade0a8 Prevent potential bash buffer overflow, fixes #22 2019-08-09 10:33:18 +02:00
deajan
8868dcbb7f Better _REMOTE_TOKEN removal 2019-08-09 10:13:59 +02:00
deajan
d73ffccd06 Debugging gpg on travis 2019-07-23 11:43:00 +02:00
deajan
741167cba3 Try not to use loopback for gpg 2.1.11 2019-07-23 11:37:55 +02:00
deajan
fe75737910 Removed blank spaces 2019-07-23 11:07:27 +02:00
deajan
3ee026281b Fixed typo 2019-07-23 11:07:12 +02:00
deajan
0e568c9a10 Updated RSA key filename 2019-07-23 10:44:04 +02:00
deajan
ee36a4b3cb Updated ofunctions 2019-07-23 10:41:17 +02:00
deajan
6bbb7b6b80 Fix new folder insertions in upgrade script 2019-07-23 10:37:19 +02:00
deajan
6587123d11 Fixed authorized_keys cleanup after tests 2019-07-19 16:49:20 +02:00
deajan
8bc1237ef0 Travis tests debugging 2019-07-11 13:49:08 +02:00
deajan
e0edee8ebf Remote remote token after usage #17 of # 158 2019-07-09 11:31:20 +02:00
deajan
bef776072f Updated ofunctions from osync 2019-07-05 23:29:06 +02:00
deajan
da91b4353b Minor fixes in upgrade script 2019-07-05 23:28:23 +02:00
deajan
f4b7ef280b Fixed upgrade step on REMOTE_BACKUP 2019-05-21 15:44:17 +02:00
deajan
27a30e1521 Another typo bites the dust 2019-05-21 14:32:46 +02:00
deajan
7f25f9b152 Fixed horrible typo 2019-05-21 14:30:12 +02:00
deajan
27ff3dc9f8 Fixed bogus copy paste 2019-05-21 13:54:31 +02:00
deajan
e1e39cd2a5 Remove useless cat 2019-05-21 13:39:49 +02:00
deajan
c82c5ed3cd Removed unnecessary dot 2019-05-21 13:34:30 +02:00
deajan
60a3079b3a Fixed port in remote tests 2019-05-21 13:30:18 +02:00
deajan
bf78921600 Updated config files using update script 2019-05-21 13:24:10 +02:00
deajan
c8fb0de3ee Improved script to handle elder bad config files 2019-05-21 13:21:58 +02:00
deajan
68d1df9b18 Updated example configuration file 2019-05-21 12:54:42 +02:00
deajan
1d6fb00344 Improved upgrade script 2019-05-21 12:51:46 +02:00
deajan
d3c1f183a2 Updated test config files 2019-05-21 12:30:44 +02:00
deajan
be3b3f8c0c Updated changelog 2019-05-21 12:18:00 +02:00
deajan
556b1d8f35 Rebuilt targets 2019-05-21 12:16:08 +02:00
deajan
7b75d13b1f Udpated changelog 2019-05-21 12:15:46 +02:00
deajan
4a7994d2b2 Bumped version 2019-05-21 12:14:35 +02:00
deajan
a5e17202c6 Added required config file revision 2019-05-21 12:13:48 +02:00
deajan
703dc46af7 Updated default config file with revision 2019-05-21 12:12:58 +02:00
deajan
b3ff577ea0 Fixed --verbose --stats output 2019-05-21 12:03:35 +02:00
deajan
c70f0d6bed Rebuilt targets 2019-05-21 11:46:07 +02:00
deajan
7edaa48c6a Installer fixes from osync 2019-05-21 11:45:34 +02:00
deajan
56d779209f Code reorganization 2019-05-21 11:45:24 +02:00
deajan
01d6588d15 Bootstrap fixes from osync 2019-05-21 11:45:13 +02:00
deajan
f9af20fe3b Allow commandline override of config file values 2019-05-20 23:33:06 +02:00
deajan
9a5a8048bf Updated boolean logic in tests 2019-02-26 14:22:09 +01:00
deajan
233ec565e3 Rebuilt targets 2019-02-26 12:09:50 +01:00
deajan
42684fbdf8 Removed bogus SIMPLE log calls 2019-02-26 12:09:33 +01:00
deajan
0f18258e61 Changed default yes/no to booleans 2019-02-08 13:22:37 +01:00
deajan
3fccc45ac7 Never code bash and python at the same time 2019-01-14 15:34:08 +01:00
deajan
ead5712c90 Only install rngd on travis 2019-01-14 15:00:06 +01:00
deajan
8b5965ae4a Rebuilt targets 2019-01-03 10:10:08 +01:00
deajan
45464074ef Updated installer logic 2019-01-03 10:09:29 +01:00
deajan
69590167d8 Updated changelog for release 2019-01-03 10:08:00 +01:00
deajan
6b5f819b17 Update ofunctions 2019-01-03 10:07:03 +01:00
deajan
acb27bef1b Fixed push rotation tests 2018-11-06 22:51:29 +01:00
deajan
e710729a88 Added double rotation checks 2018-11-06 19:17:32 +01:00
deajan
29e8369421 Updated changelog 2018-11-06 19:17:14 +01:00
deajan
93a5bf5b8d Rebuilt targets 2018-11-06 19:17:07 +01:00
deajan
d744f48f9b Added missing doublequotes for remote env 2018-11-06 17:55:27 +01:00
deajan
768d17c2cb Rebuilt targets 2018-11-06 15:33:16 +01:00
deajan
9cf5574488 Implemented #15 2018-11-06 15:17:24 +01:00
deajan
d1f344e879 Fixed encryption 2018-11-05 21:43:40 +01:00
deajan
40999fd9a2 Updated test suite 2018-11-05 21:43:25 +01:00
deajan
14fc50304a Fixed infamous recursive exclude bug 2018-11-05 21:04:20 +01:00
deajan
ff9b9b632b Updated ofunctions 2018-11-05 14:12:39 +01:00
deajan
5a51855691 Moved TrapQuit to script entry point 2018-10-10 00:08:49 +02:00
deajan
c33e6b69c1 More shellcheck exclusions 2018-10-02 21:22:11 +02:00
deajan
8079a2e9a5 Update bootstrap 2018-10-02 21:21:58 +02:00
deajan
657f828c74 Updated common installer and batch 2018-10-02 21:21:23 +02:00
deajan
0b4a2543d4 merge.sh 2018-10-02 21:19:52 +02:00
deajan
19ee83bb52 Updated ofunctions 2018-10-02 21:19:23 +02:00
deajan
41f64388d9 Added default SKIP_REMOTE value 2018-10-02 19:53:06 +02:00
deajan
3276c9189e Updated tests syntax header 2018-10-02 19:48:04 +02:00
deajan
a0384fc452 Rebuilt targets 2018-10-01 15:12:32 +02:00
deajan
f4525f0f13 Quicker PoorMansRandomGenerator function + shellcheck fixes 2018-10-01 15:12:16 +02:00
deajan
512bf8e5a4 Rebuilt targets 2018-09-30 17:18:09 +02:00
deajan
ee52fe13a9 Fixed remote backup rotation 2018-09-30 17:17:45 +02:00
deajan
83aa596568 More detailled rotation logs 2018-09-30 17:11:18 +02:00
deajan
f0eee8897f Rebuilt targets 2018-09-30 17:06:13 +02:00
deajan
fec66efe2a Also don't move data on file Rotation remotely 2018-09-30 17:05:50 +02:00
deajan
0a17c17c28 Updated shunit2 test framework 2018-09-30 16:31:34 +02:00
deajan
8070640adc Updated changelog 2018-09-30 16:28:51 +02:00
deajan
a8d5814b8f Rebuilt targets 2018-09-30 16:27:18 +02:00
deajan
f72d1366ff Always make delta copies on rotation, even in local mode 2018-09-30 16:26:52 +02:00
deajan
7844475278 Update copyright year 2018-09-30 16:17:23 +02:00
deajan
a12d530fee Added more log infos 2018-09-30 16:16:57 +02:00
deajan
cf78b6e5f9 Rebuilt targets 2018-09-30 15:32:19 +02:00
deajan
8f7f30f937 Fixed typo 2018-09-30 15:30:00 +02:00
deajan
0a204d47a2 Missing env for remote TSTAMP 2018-09-30 15:16:55 +02:00
deajan
f95328ce39 Rebuilt targets 2018-09-30 14:24:50 +02:00
deajan
f3014cb029 Added more exclusion explanation 2018-09-30 14:23:24 +02:00
deajan
3aa1837f1a Fixed rsync args built logic 2018-09-30 14:20:39 +02:00
deajan
78abc689f9 Rebuilt targets 2018-09-30 14:01:28 +02:00
deajan
3ca611584c Fixed typo 2018-09-30 14:01:03 +02:00
deajan
ec65960dc7 Rebuilt targets 2018-09-30 13:56:00 +02:00
deajan
edae3ff2b1 Fixes #11 2018-09-30 13:55:34 +02:00
deajan
c56b1d6031 Updated RSYNC_ARGS to recursive and non recursive styles 2018-09-30 13:51:06 +02:00
deajan
33fff1c5a2 Updated date format 2018-09-30 13:44:38 +02:00
deajan
e83474614f Rebuilt targets 2018-09-17 11:00:30 +02:00
deajan
0a10ea99d1 Installer fixes from osync 2018-09-12 21:14:06 +02:00
deajan
6fa917d56e Fixed empty source dir in logs 2018-09-12 20:55:45 +02:00
deajan
743daeb3d9 Rebuilt targets 2018-07-30 17:35:26 +02:00
deajan
019e8aa68f Fixed non recursive backups being recursive 2018-07-30 17:35:07 +02:00
deajan
b9799cfb33 Better logging on file backups 2018-07-30 17:33:40 +02:00
deajan
c57688b035 Rebuilt targets 2018-07-30 16:39:23 +02:00
deajan
3632fd3972 Multiple path fixes 2018-07-30 16:38:59 +02:00
deajan
bb77f1f5ec Removed double / in _ListRecursiveBackupDirectories 2018-07-30 16:34:46 +02:00
deajan
e47e179169 Fix for path strip on '/' backups 2018-07-30 16:28:12 +02:00
deajan
c525b449f9 Rebuilt targets 2018-07-30 16:22:20 +02:00
deajan
604d381091 Fix for '/' recursive backups stripping path 2018-07-30 16:22:02 +02:00
deajan
5d460c3916 Rebuilt targets 2018-07-30 15:36:53 +02:00
deajan
5b0442caed Updated ofunctions 2018-07-30 15:36:39 +02:00
deajan
fdb88e04f2 Updated merge from osync 2018-07-30 15:36:31 +02:00
deajan
7e432c340e Added busybox redirect comment 2018-06-04 14:14:24 +02:00
deajan
c7e3bc3e5f Updated merge / bootstrap from osync 2018-03-25 14:16:31 +02:00
deajan
d9b61f2aaf Updated common installer from osync 2018-03-25 14:15:45 +02:00
deajan
b800f25d75 Fixed various time check variables 2018-03-15 20:14:18 +01:00
deajan
e190278fe6 Rebuilt targets 2018-03-15 10:01:34 +01:00
deajan
dc9949de4b Fixed Exec hooks time control 2018-02-26 00:41:54 +01:00
deajan
15fa534df0 Fix typos in execution hooks 2018-02-26 00:32:04 +01:00
deajan
7fd2fcf703 Updated changelog 2018-02-24 17:42:36 +01:00
deajan
2e2b757fd7 Rebuilt targets 2018-02-24 17:41:57 +01:00
deajan
71d7445c76 Updated ofunctions 2018-02-24 17:41:33 +01:00
deajan
5612badadb Added comment for SMB shares 2018-02-20 22:46:07 +01:00
deajan
2a270fc9c3 Workaround for language: bash failing with mysql 2018-01-30 17:10:10 +01:00
deajan
3147e30965 Trying language php to force mysql install 2018-01-30 16:36:16 +01:00
deajan
2eed11303b The saarch for mysql continues... 2018-01-29 23:10:25 +01:00
deajan
1ab7176194 Travis mysql Hell 2018-01-29 22:56:46 +01:00
deajan
792d1415f2 Travis mysql hell 2018-01-29 22:40:32 +01:00
deajan
a2c5f6e51d Another travis syntax fix 2018-01-26 23:36:24 +01:00
deajan
299f242e89 Fixed travis file syntax 2018-01-26 23:05:01 +01:00
deajan
204680d108 Travis is trying ruby ??? 2018-01-26 18:15:56 +01:00
deajan
d038e5dae4 Still trying to get mysql to work 2018-01-26 18:13:29 +01:00
deajan
e85ddfa2ae Still trying to get mysql to work 2018-01-26 18:11:28 +01:00
deajan
08f017596b Rebuilt targets 2018-01-19 16:08:59 +01:00
deajan
b4a7f3018a Fixed ExecTasks call syntax 2018-01-19 16:08:05 +01:00
deajan
2ba247e2eb Revert to precise 2018-01-04 22:37:07 +01:00
deajan
2653ea8923 Mysql hell git add .travis.yml! Thanks travis 2018-01-04 00:09:51 +01:00
deajan
631c6d3565 More mysql tries 2018-01-04 00:03:31 +01:00
deajan
af81f46935 Update travis for debugging 2018-01-03 23:55:53 +01:00
deajan
1bd73f2dec Travis mysql debugging 2018-01-03 23:45:03 +01:00
deajan
8f2cd131fe Add missing singequote 2018-01-03 23:40:16 +01:00
deajan
2a0245c2d5 Travis mysql... 2018-01-03 22:47:58 +01:00
deajan
b53531cac3 Switching to mysql 5.6 2018-01-03 22:45:25 +01:00
deajan
3a330b841c Another travis mysql fix 2018-01-03 22:32:42 +01:00
deajan
f3a9e1d05e Rebuilt targets 2018-01-03 22:32:19 +01:00
deajan
c5ad174a0e Improved time limit enforcement 2018-01-03 22:31:21 +01:00
deajan
f8d191cb54 Trying fix for Travis #6842 2018-01-03 22:27:04 +01:00
deajan
42a2942557 Updated ExecTasks 2018-01-03 17:50:40 +01:00
deajan
9dc22e6855 Updated ofunctions 2018-01-03 17:41:30 +01:00
deajan
884fcf7eff Rebuilt targets for v2.1-beta3 2017-06-21 15:01:28 +02:00
deajan
92da7e68de Bumped version to v2.1-beta3 2017-06-21 15:01:03 +02:00
deajan
c02d301a45 Rebuilt targets 2017-06-20 19:02:22 +02:00
deajan
a48c55ef69 Updated changelog 2017-06-20 18:59:45 +02:00
deajan
83fdc5ee22 Moved rsync code before init 2017-06-20 16:23:14 +02:00
deajan
f097999bd3 Fixed missing rsync args 2017-06-20 16:20:44 +02:00
deajan
9a9a7ac8a3 Better cmdline option management 2017-06-20 15:56:07 +02:00
deajan
c60ac3784d Rebuilt targets 2017-06-20 15:23:29 +02:00
deajan
02fcf746ef Updated merge syntax 2017-06-20 15:22:57 +02:00
deajan
bd06dfe397 Made merge and bootstrap program agonstic 2017-06-20 15:22:37 +02:00
deajan
c7eb51f8a3 Fixed missing fi 2017-06-20 14:53:12 +02:00
deajan
ee689da7f3 Allow --partial, --delete and --dontgetsize to override config files 2017-06-20 14:50:50 +02:00
deajan
27a2b26473 Rebuilt targets 2017-06-09 11:50:10 +02:00
deajan
c2b14377c3 Update ofunctions 2017-06-09 11:49:53 +02:00
deajan
f15f216989 Rebuilt targets 2017-06-09 09:58:51 +02:00
deajan
0ee0bf52fd Updated ofunctions 2017-06-09 09:58:30 +02:00
deajan
8cd3ac6a50 Merge branch 'master' of https://github.com/deajan/obackup 2017-05-30 09:54:08 +02:00
deajan
e916df3e87 Update readme 2017-05-30 09:54:04 +02:00
deajan
be4ba9f366 Update ofunctions 2017-05-30 09:53:30 +02:00
Orsiris de Jong
cbdd836ddd Added BSD license badge 2017-05-22 14:57:55 +02:00
deajan
fbd5317010 Updated ofunctions 2017-05-22 12:05:50 +02:00
deajan
64c32bca07 Update ofunctions 2017-04-20 21:24:04 +02:00
deajan
644346316c Check program type for service files 2017-04-17 20:26:40 +02:00
deajan
304dace821 Added VerComp Subset 2017-04-11 13:28:32 +02:00
deajan
d311ef0574 Update ofunctions 2017-04-11 13:28:08 +02:00
deajan
56010d3a0d Update installer from pmocr 2017-04-11 13:27:58 +02:00
deajan
fefc01983d Added default umask 2017-04-08 22:11:27 +02:00
deajan
faafa81d10 Added umask for log & run dir files 2017-04-08 21:11:53 +02:00
deajan
5302c8c5f0 Updated common_batch from osync project 2017-03-30 22:19:41 +02:00
deajan
e2e4f4b563 Added --remove option to doc 2017-03-30 22:18:52 +02:00
deajan
67ecf9ea0a Update ofunctions from osync project 2017-03-30 22:18:36 +02:00
deajan
6f51b472c5 Updated installer from pmocr 2017-03-14 22:42:01 +01:00
deajan
f342cdb3b0 Rebuilt targets 2017-03-14 22:33:52 +01:00
deajan
2bb56d07b8 Fixed wrong program name 2017-03-14 22:33:35 +01:00
deajan
f291611011 Rebuilt targets 2017-03-14 22:31:15 +01:00
deajan
5d85b58f48 Updated changelog 2017-03-14 22:30:24 +01:00
deajan
30c88daaf2 Removed gzip --rsyncable option for MacOS 2017-03-14 22:18:15 +01:00
deajan
b121d96d69 Updated TSTAMP readability 2017-03-14 22:15:25 +01:00
deajan
375f5d413b Updated installer version logic 2017-03-14 22:15:13 +01:00
deajan
9474629d31 Check remote operation before remote cmd run 2017-02-13 12:16:10 +01:00
deajan
f4d587f1d1 Revert "Check backup type before remote system URI"
This reverts commit 52161b3faf.
2017-02-11 11:23:29 +01:00
deajan
52161b3faf Check backup type before remote system URI 2017-02-11 11:17:44 +01:00
deajan
34ba82784b Don't uninstall ssh filter if needed 2017-02-10 11:42:31 +01:00
deajan
d629105d92 Updated changelog 2017-02-10 11:40:59 +01:00
deajan
bc2705273f Funnier obfuscation :) 2017-02-10 11:06:40 +01:00
deajan
041ad4eaa7 More fine grained _REMOTE_TOKEN obfuscation 2017-02-10 10:59:56 +01:00
deajan
32c29f2ea3 Obfuscate _REMOTE_TOKEN in logs 2017-02-10 10:50:40 +01:00
deajan
bc799380a4 Don't show failed commands on stdout, only log them 2017-02-10 10:40:56 +01:00
deajan
b4829a798f Remove unnecessary spaces 2017-02-10 10:34:40 +01:00
deajan
a54577f6c8 rngd also needs sudo for travis 2017-02-09 13:30:03 +01:00
deajan
3b71ad70d2 Enabled sudo on travis 2017-02-09 13:10:38 +01:00
deajan
249e37db32 Fixed _REMOTE_TOKEN for rsync again (common) 2017-02-09 12:53:38 +01:00
deajan
d395e99640 Fixed wrong place for _REMOTE_TOKEN in rsync function 2017-02-09 12:49:31 +01:00
deajan
90857f4248 Fixed some typos 2017-02-09 12:48:20 +01:00
deajan
1f3da3a952 Removed debugging line 2017-02-09 12:45:54 +01:00
deajan
b5ef76bfa3 Added optional separator for SetConfFileValues 2017-02-09 12:45:27 +01:00
deajan
7a930c9aef Fixed typo 2017-02-09 12:35:33 +01:00
deajan
900783f543 Added _REMOTE_TOKEN option for ssh filter 2017-02-09 12:30:56 +01:00
deajan
ddd8e9eef3 Updated ssh filter from osync project 2017-02-09 12:28:30 +01:00
deajan
0a2df4efe9 Added _REMOTE_TOKEN and MAIL_BODY_CHARSET options 2017-02-09 12:28:02 +01:00
deajan
30f6e4e02c Added ssh_filter token options 2017-02-09 12:22:47 +01:00
deajan
7fc7676473 Imported ssh_filter fixes from osync 2017-02-09 12:21:11 +01:00
deajan
39cc2ca4b2 Update merger from osync 2017-02-09 12:13:36 +01:00
deajan
88a0718636 Update installer from osync 2017-02-09 12:13:10 +01:00
deajan
252df96e59 Updated ofunctions from osync 2017-02-09 12:12:41 +01:00
deajan
06efa18901 Fixed some typos 2017-01-09 12:30:38 +01:00
deajan
365c93a8a3 dd bs numeric format is not mac compatible 2017-01-09 11:21:47 +01:00
deajan
84bf01f2c8 Typos in assertEquals 2017-01-09 11:15:05 +01:00
deajan
84fa92e46c More preflight checks 2017-01-09 11:13:57 +01:00
40 changed files with 13986 additions and 4240 deletions

View File

@ -1,12 +1,13 @@
language: # Necessary evil: if 'bash' is selected as language, travis will try to install ruby and fails
bash language: php
sudo: required
services: services:
mysql - mysql
os: os:
linux linux
osx
before_script: before_script:
mysql -e 'CREATE DATABASE travistest;' mysql -e 'CREATE DATABASE travistest;'

View File

@ -9,6 +9,45 @@ KNOWN ISSUES
CHANGELOG CHANGELOG
--------- ---------
dd Mmm YYYY: obackup v2.1 RC2 released
--------------------------------------
- Added a default required config file revision
- ! Update script updated accordingly
- Updated ofunctions to use booleans instead of yes/no syntax which still works
- Fixed verbose rsync output not working
- Fixed a potential bash buffer overflow when very large file lists are logged
03 Jan 2019: obackup v2.1 RC1 released
--------------------------------------
- File backup rotation will always make copies instead of moving files, in order to let rsync do deltas even in local backup scheme
- Fixed non recursive backups being recursive (bug introduced in a8b6bcb)
- Fixed multiple trailing slashes when backing-up '/'
- Updated ofunctions
- Upgraded shunit2 test framework to v2.1.8pre (git commit 07bb329)
- Minor fixes
20 Jun 2017: obackup v2.1 beta3 released
----------------------------------------
- Fixed regression where some commandline arguments weren't honored anymore since 2.1 beta1 (--delete, --stats, --dontgetsize)
- Fixed commandline arguments aren't checked against valid list
14 Mar 2017: obackup v2.1 beta2 released
----------------------------------------
- Fixed remote commands can be run on local runs and obviously fail
- Uninstall leaves ssh_filter if needed by other programs
- Logger now obfuscates _REMOTE_TOKEN
- Improved sudo privilege run
- Brand new ssh filter from osync project
- Better installer with --remove option from osync project
- Updated ofunctions from osync project
- Fixes UTF-8 escaped characters in log files due to LC_ALL=C
- Optional MAIL_BODY_CHARSET so destination mails aren't sent as UTF-8 anymore depending on systems
- Minor fixes
04 Jan 2017: obackup v2.1 beta1 released 04 Jan 2017: obackup v2.1 beta1 released
---------------------------------------- ----------------------------------------

View File

@ -1,7 +1,8 @@
# obackup [![Build Status](https://travis-ci.org/deajan/obackup.svg?branch=master)](https://travis-ci.org/deajan/obackup) [![GitHub Release](https://img.shields.io/github/release/deajan/obackup.svg?label=Latest)](https://github.com/deajan/obackup/releases/latest) # obackup [![Build Status](https://travis-ci.org/deajan/obackup.svg?branch=master)](https://travis-ci.org/deajan/obackup) [![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause) [![GitHub Release](https://img.shields.io/github/release/deajan/obackup.svg?label=Latest)](https://github.com/deajan/obackup/releases/latest)
A robust file & database backup script that works for local and remote push or pull backups via ssh. A robust file & database backup script that works for local and remote push or pull backups via ssh.
Designed to backup multiple subdirectories with a timeslot for each.
Supports encryption while still using rsync to lower transfered data (see advantages and caveats below).
## About ## About
@ -34,7 +35,7 @@ You may disable this behavior in the config file.
You can download the latest obackup script from authors website. You can download the latest obackup script from authors website.
You may also clone the following git which will maybe have some more recent builds. You may also clone the following git which will maybe have some more recent builds.
$ git clone -b "v2.0-maint" git://github.com/deajan/obackup.git $ git clone -b "v2.1-maint" git://github.com/deajan/obackup.git
$ cd obackup $ cd obackup
$ ./install.sh $ ./install.sh

View File

@ -1,30 +1,65 @@
#!/usr/bin/env bash #!/usr/bin/env bash
## dev pre-processor bootstrap rev 2016121302 ## dev pre-processor bootstrap rev 2019052001
## Yeah !!! A really tech sounding name... In fact it's just include emulation in bash ## Yeah !!! A really tech sounding name... In fact it's just include emulation in bash
function Usage {
echo "$0 - Quick and dirty preprocessor for including ofunctions into programs"
echo "Creates and executes $0.tmp.sh"
echo "Usage:"
echo ""
echo "$0 --program=osync|obackup|pmocr [options to pass to program]"
echo "Can also be run with BASHVERBOSE=yes environment variable in order to prefix program with bash -x"
}
if [ ! -f "./merge.sh" ]; then if [ ! -f "./merge.sh" ]; then
echo "Plrase run bootstrap.sh from osync/dev directory." echo "Plrase run bootstrap.sh from osync/dev directory."
exit 1 exit 1
fi fi
bootstrapProgram=""
opts=()
outputFileName="$0" outputFileName="$0"
for i in "${@}"; do
case "$i" in
--program=*)
bootstrapProgram="${i##*=}"
;;
*)
opts+=("$i")
;;
esac
done
if [ "$bootstrapProgram" == "" ]; then
Usage
exit 128
else
source "merge.sh" source "merge.sh"
__PREPROCESSOR_PROGRAM=obackup
__PREPROCESSOR_PROGRAM=$bootstrapProgram
__PREPROCESSOR_PROGRAM_EXEC="n_$bootstrapProgram.sh"
__PREPROCESSOR_Constants __PREPROCESSOR_Constants
cp "n_$__PREPROCESSOR_PROGRAM.sh" "$outputFileName.tmp.sh" if [ ! -f "$__PREPROCESSOR_PROGRAM_EXEC" ]; then
echo "Cannot find file $__PREPROCESSOR_PROGRAM executable [n_$bootstrapProgram.sh]."
exit 1
fi
fi
cp "$__PREPROCESSOR_PROGRAM_EXEC" "$outputFileName.tmp.sh"
if [ $? != 0 ]; then if [ $? != 0 ]; then
echo "Cannot copy original file [n_$__PREPROCESSOR_PROGRAM.sh] to [$outputFileName.tmp.sh]." echo "Cannot copy original file [$__PREPROCESSOR_PROGRAM_EXEC] to [$outputFileName.tmp.sh]."
exit 1 exit 1
fi fi
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "$outputFileName.tmp.sh" __PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "$outputFileName.tmp.sh"
done done
chmod +x "$0.tmp.sh" chmod +x "$outputFileName.tmp.sh"
if [ $? != 0 ]; then if [ $? != 0 ]; then
echo "Cannot make [$outputFileName] executable.." echo "Cannot make [$outputFileName] executable."
exit 1 exit 1
fi fi
@ -33,4 +68,8 @@ if type termux-fix-shebang > /dev/null 2>&1; then
termux-fix-shebang "$outputFileName.tmp.sh" termux-fix-shebang "$outputFileName.tmp.sh"
fi fi
"$outputFileName.tmp.sh" "$@" if [ "$BASHVERBOSE" == "yes" ]; then
bash -x "$outputFileName.tmp.sh" "${opts[@]}"
else
"$outputFileName.tmp.sh" "${opts[@]}"
fi

View File

@ -1,9 +1,9 @@
#!/usr/bin/env bash #!/usr/bin/env bash
SUBPROGRAM=[prgname] SUBPROGRAM=[prgname]
PROGRAM="$SUBPROGRAM-batch" # Batch program to run osync / obackup instances sequentially and rerun failed ones PROGRAM="$SUBPROGRAM-batch" # Batch program to run osync / obackup instances sequentially and rerun failed ones
AUTHOR="(L) 2013-2016 by Orsiris de Jong" AUTHOR="(L) 2013-2020 by Orsiris de Jong"
CONTACT="http://www.netpower.fr - ozy@netpower.fr" CONTACT="http://www.netpower.fr - ozy@netpower.fr"
PROGRAM_BUILD=2016120401 PROGRAM_BUILD=2020031502
## Runs an osync /obackup instance for every conf file found ## Runs an osync /obackup instance for every conf file found
## If an instance fails, run it again if time permits ## If an instance fails, run it again if time permits
@ -26,36 +26,19 @@ else
LOG_FILE=./$SUBPROGRAM-batch.log LOG_FILE=./$SUBPROGRAM-batch.log
fi fi
## Default directory where to store temporary run files
if [ -w /tmp ]; then
RUN_DIR=/tmp
elif [ -w /var/tmp ]; then
RUN_DIR=/var/tmp
else
RUN_DIR=.
fi
# No need to edit under this line ############################################################## # No need to edit under this line ##############################################################
function _logger { include #### Logger SUBSET ####
local value="${1}" # What to log include #### CleanUp SUBSET ####
echo -e "$value" >> "$LOG_FILE" include #### GenericTrapQuit SUBSET ####
}
function Logger {
local value="${1}" # What to log
local level="${2}" # Log level: DEBUG, NOTICE, WARN, ERROR, CRITIAL
prefix="$(date) - "
if [ "$level" == "CRITICAL" ]; then
_logger "$prefix\e[41m$value\e[0m"
elif [ "$level" == "ERROR" ]; then
_logger "$prefix\e[91m$value\e[0m"
elif [ "$level" == "WARN" ]; then
_logger "$prefix\e[93m$value\e[0m"
elif [ "$level" == "NOTICE" ]; then
_logger "$prefix$value"
elif [ "$level" == "DEBUG" ]; then
if [ "$DEBUG" == "yes" ]; then
_logger "$prefix$value"
fi
else
_logger "\e[41mLogger function called without proper loglevel.\e[0m"
_logger "$prefix$value"
fi
}
function CheckEnvironment { function CheckEnvironment {
## osync / obackup executable full path can be set here if it cannot be found on the system ## osync / obackup executable full path can be set here if it cannot be found on the system
@ -119,15 +102,15 @@ function Batch {
fi fi
done done
runList=("${runAgainList[@]}") runList=("${runAgainList[@]}")
runs=$(($runs + 1)) runs=$((runs + 1))
done done
fi fi
} }
function Usage { function Usage {
echo "$PROGRAM $PROGRAM_BUILD" echo "$PROGRAM $PROGRAM_BUILD"
echo $AUTHOR echo "$AUTHOR"
echo $CONTACT echo "$CONTACT"
echo "" echo ""
echo "Batch script to sequentially run osync or obackup instances and rerun failed ones." echo "Batch script to sequentially run osync or obackup instances and rerun failed ones."
echo "Usage: $PROGRAM.sh [OPTIONS] [$SUBPROGRAM OPTIONS]" echo "Usage: $PROGRAM.sh [OPTIONS] [$SUBPROGRAM OPTIONS]"
@ -145,6 +128,8 @@ function Usage {
exit 128 exit 128
} }
trap GenericTrapQuit TERM EXIT HUP QUIT
opts="" opts=""
for i in "$@" for i in "$@"
do do

View File

@ -1,40 +1,34 @@
#!/usr/bin/env bash #!/usr/bin/env bash
include #### _OFUNCTIONS_BOOTSTRAP SUBSET #### ## Installer script suitable for osync / obackup / pmocr
PROGRAM=[prgname] PROGRAM=[prgname]
PROGRAM_VERSION=[version]
PROGRAM_VERSION=$(grep "PROGRAM_VERSION=" $PROGRAM.sh)
PROGRAM_VERSION=${PROGRAM_VERSION#*=}
PROGRAM_BINARY=$PROGRAM".sh" PROGRAM_BINARY=$PROGRAM".sh"
PROGRAM_BATCH=$PROGRAM"-batch.sh" PROGRAM_BATCH=$PROGRAM"-batch.sh"
SCRIPT_BUILD=2016122701 SSH_FILTER="ssh_filter.sh"
SCRIPT_BUILD=2020042901
INSTANCE_ID="installer-$SCRIPT_BUILD"
## osync / obackup / pmocr / zsnap install script ## osync / obackup / pmocr / zsnap install script
## Tested on RHEL / CentOS 6 & 7, Fedora 23, Debian 7 & 8, Mint 17 and FreeBSD 8, 10 and 11 ## Tested on RHEL / CentOS 6 & 7, Fedora 23, Debian 7 & 8, Mint 17 and FreeBSD 8, 10 and 11
## Please adapt this to fit your distro needs ## Please adapt this to fit your distro needs
include #### OFUNCTIONS MICRO SUBSET ####
# Get current install.sh path from http://stackoverflow.com/a/246128/2635443 # Get current install.sh path from http://stackoverflow.com/a/246128/2635443
SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" SCRIPT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
CONF_DIR=$FAKEROOT/etc/$PROGRAM _LOGGER_SILENT=false
BIN_DIR="$FAKEROOT/usr/local/bin" _STATS=1
SERVICE_DIR_INIT=$FAKEROOT/etc/init.d ACTION="install"
# Should be /usr/lib/systemd/system, but /lib/systemd/system exists on debian & rhel / fedora FAKEROOT=""
SERVICE_DIR_SYSTEMD_SYSTEM=$FAKEROOT/lib/systemd/system
SERVICE_DIR_SYSTEMD_USER=$FAKEROOT/etc/systemd/user
## osync specific code
OSYNC_SERVICE_FILE_INIT="osync-srv"
OSYNC_SERVICE_FILE_SYSTEMD_SYSTEM="osync-srv@.service"
OSYNC_SERVICE_FILE_SYSTEMD_USER="osync-srv@.service.user"
## pmocr specfic code
PMOCR_SERVICE_FILE_INIT="pmocr-srv"
PMOCR_SERVICE_FILE_SYSTEMD_SYSTEM="pmocr-srv@.service"
## Generic code
## Default log file ## Default log file
if [ -w $FAKEROOT/var/log ]; then if [ -w "$FAKEROOT/var/log" ]; then
LOG_FILE="$FAKEROOT/var/log/$PROGRAM-install.log" LOG_FILE="$FAKEROOT/var/log/$PROGRAM-install.log"
elif ([ "$HOME" != "" ] && [ -w "$HOME" ]); then elif ([ "$HOME" != "" ] && [ -w "$HOME" ]); then
LOG_FILE="$HOME/$PROGRAM-install.log" LOG_FILE="$HOME/$PROGRAM-install.log"
@ -42,11 +36,15 @@ else
LOG_FILE="./$PROGRAM-install.log" LOG_FILE="./$PROGRAM-install.log"
fi fi
include #### QuickLogger SUBSET ####
include #### UrlEncode SUBSET #### include #### UrlEncode SUBSET ####
include #### GetLocalOS SUBSET #### include #### GetLocalOS SUBSET ####
include #### GetConfFileValue SUBSET ####
include #### CleanUp SUBSET ####
include #### GenericTrapQuit SUBSET ####
function SetLocalOSSettings { function SetLocalOSSettings {
USER=root USER=root
DO_INIT=true
# LOCAL_OS and LOCAL_OS_FULL are global variables set at GetLocalOS # LOCAL_OS and LOCAL_OS_FULL are global variables set at GetLocalOS
@ -56,23 +54,25 @@ function SetLocalOSSettings {
;; ;;
*"MacOSX"*) *"MacOSX"*)
GROUP=admin GROUP=admin
DO_INIT=false
;; ;;
*"msys"*|*"Cygwin"*) *"Cygwin"*|*"Android"*|*"msys"*|*"BusyBox"*)
USER="" USER=""
GROUP="" GROUP=""
DO_INIT=false
;; ;;
*) *)
GROUP=root GROUP=root
;; ;;
esac esac
if [ "$LOCAL_OS" == "Android" ] || [ "$LOCAL_OS" == "MacOSX" ] || [ "$LOCAL_OS" == "BusyBox" ]; then if [ "$LOCAL_OS" == "Android" ] || [ "$LOCAL_OS" == "BusyBox" ]; then
QuickLogger "Cannot be installed on [$LOCAL_OS]. Please use $PROGRAM.sh directly." Logger "Cannot be installed on [$LOCAL_OS]. Please use $PROGRAM.sh directly." "CRITICAL"
exit 1 exit 1
fi fi
if ([ "$USER" != "" ] && [ "$(whoami)" != "$USER" ] && [ "$FAKEROOT" == "" ]); then if ([ "$USER" != "" ] && [ "$(whoami)" != "$USER" ] && [ "$FAKEROOT" == "" ]); then
QuickLogger "Must be run as $USER." Logger "Must be run as $USER." "CRITICAL"
exit 1 exit 1
fi fi
@ -80,138 +80,198 @@ function SetLocalOSSettings {
} }
function GetInit { function GetInit {
if [ -f /sbin/init ]; then if [ -f /sbin/openrc-run ]; then
init="openrc"
Logger "Detected openrc." "NOTICE"
elif [ -f /sbin/init ]; then
if file /sbin/init | grep systemd > /dev/null; then if file /sbin/init | grep systemd > /dev/null; then
init="systemd" init="systemd"
Logger "Detected systemd." "NOTICE"
else else
init="initV" init="initV"
Logger "Detected initV." "NOTICE"
fi fi
else else
QuickLogger "Can't detect initV or systemd. Service files won't be installed. You can still run $PROGRAM manually or via cron." Logger "Can't detect initV, systemd or openRC. Service files won't be installed. You can still run $PROGRAM manually or via cron." "WARN"
init="none" init="none"
fi fi
} }
function CreateConfDir { function CreateDir {
if [ ! -d "$CONF_DIR" ]; then local dir="${1}"
mkdir "$CONF_DIR" local dirMask="${2}"
local dirUser="${3}"
local dirGroup="${4}"
if [ ! -d "$dir" ]; then
(
if [ $(IsInteger $dirMask) -eq 1 ]; then
umask $dirMask
fi
mkdir -p "$dir"
)
if [ $? == 0 ]; then if [ $? == 0 ]; then
QuickLogger "Created directory [$CONF_DIR]." Logger "Created directory [$dir]." "NOTICE"
else else
QuickLogger "Cannot create directory [$CONF_DIR]." Logger "Cannot create directory [$dir]." "CRITICAL"
exit 1 exit 1
fi fi
fi
if [ "$dirUser" != "" ]; then
userGroup="$dirUser"
if [ "$dirGroup" != "" ]; then
userGroup="$userGroup"":$dirGroup"
fi
chown "$userGroup" "$dir"
if [ $? != 0 ]; then
Logger "Could not set directory ownership on [$dir] to [$userGroup]." "CRITICAL"
exit 1
else else
QuickLogger "Config directory [$CONF_DIR] exists." Logger "Set file ownership on [$dir] to [$userGroup]." "NOTICE"
fi
fi
}
function CopyFile {
local sourcePath="${1}"
local destPath="${2}"
local sourceFileName="${3}"
local destFileName="${4}"
local fileMod="${5}"
local fileUser="${6}"
local fileGroup="${7}"
local overwrite="${8:-false}"
local userGroup=""
if [ "$destFileName" == "" ]; then
destFileName="$sourceFileName"
fi
if [ -f "$destPath/$destFileName" ] && [ $overwrite == false ]; then
destFileName="$sourceFileName.new"
Logger "Copying [$sourceFileName] to [$destPath/$destFileName]." "NOTICE"
fi
cp "$sourcePath/$sourceFileName" "$destPath/$destFileName"
if [ $? != 0 ]; then
Logger "Cannot copy [$sourcePath/$sourceFileName] to [$destPath/$destFileName]. Make sure to run install script in the directory containing all other files." "CRITICAL"
Logger "Also make sure you have permissions to write to [$BIN_DIR]." "ERROR"
exit 1
else
Logger "Copied [$sourcePath/$sourceFileName] to [$destPath/$destFileName]." "NOTICE"
if [ "$(IsInteger $fileMod)" -eq 1 ]; then
chmod "$fileMod" "$destPath/$destFileName"
if [ $? != 0 ]; then
Logger "Cannot set file permissions of [$destPath/$destFileName] to [$fileMod]." "CRITICAL"
exit 1
else
Logger "Set file permissions to [$fileMod] on [$destPath/$destFileName]." "NOTICE"
fi
elif [ "$fileMod" != "" ]; then
Logger "Bogus filemod [$fileMod] for [$destPath] given." "WARN"
fi
if [ "$fileUser" != "" ]; then
userGroup="$fileUser"
if [ "$fileGroup" != "" ]; then
userGroup="$userGroup"":$fileGroup"
fi
chown "$userGroup" "$destPath/$destFileName"
if [ $? != 0 ]; then
Logger "Could not set file ownership on [$destPath/$destFileName] to [$userGroup]." "CRITICAL"
exit 1
else
Logger "Set file ownership on [$destPath/$destFileName] to [$userGroup]." "NOTICE"
fi
fi
fi fi
} }
function CopyExampleFiles { function CopyExampleFiles {
if [ -f "$SCRIPT_PATH/sync.conf.example" ]; then exampleFiles=()
cp "$SCRIPT_PATH/sync.conf.example" "$CONF_DIR/sync.conf.example" exampleFiles[0]="sync.conf.example" # osync
fi exampleFiles[1]="host_backup.conf.example" # obackup
exampleFiles[2]="exclude.list.example" # osync & obackup
exampleFiles[3]="snapshot.conf.example" # zsnap
exampleFiles[4]="default.conf" # pmocr
if [ -f "$SCRIPT_PATH/host_backup.conf.example" ]; then for file in "${exampleFiles[@]}"; do
cp "$SCRIPT_PATH/host_backup.conf.example" "$CONF_DIR/host_backup.conf.example" if [ -f "$SCRIPT_PATH/$file" ]; then
fi CopyFile "$SCRIPT_PATH" "$CONF_DIR" "$file" "$file" "" "" "" false
if [ -f "$SCRIPT_PATH/exlude.list.example" ]; then
cp "$SCRIPT_PATH/exclude.list.example" "$CONF_DIR/exclude.list.example"
fi
if [ -f "$SCRIPT_PATH/snapshot.conf.example" ]; then
cp "$SCRIPT_PATH/snapshot.conf.example" "$CONF_DIR/snapshot.conf.example"
fi
if [ -f "$SCRIPT_PATH/default.conf" ]; then
if [ -f "$CONF_DIR/default.conf" ]; then
cp "$SCRIPT_PATH/default.conf" "$CONF_DIR/default.conf.new"
QuickLogger "Copied default.conf to [$CONF_DIR/default.conf.new]."
else
cp "$SCRIPT_PATH/default.conf" "$CONF_DIR/default.conf"
fi
fi fi
done
} }
function CopyProgram { function CopyProgram {
cp "$SCRIPT_PATH/$PROGRAM_BINARY" "$BIN_DIR" binFiles=()
if [ $? != 0 ]; then binFiles[0]="$PROGRAM_BINARY"
QuickLogger "Cannot copy $PROGRAM_BINARY to [$BIN_DIR]. Make sure to run install script in the directory containing all other files." if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "obackup" ]; then
QuickLogger "Also make sure you have permissions to write to [$BIN_DIR]." binFiles[1]="$PROGRAM_BATCH"
exit 1 binFiles[2]="$SSH_FILTER"
else
chmod 755 "$BIN_DIR/$PROGRAM_BINARY"
QuickLogger "Copied $PROGRAM_BINARY to [$BIN_DIR]."
fi fi
if [ -f "$SCRIPT_PATH/$PROGRAM_BATCH" ]; then local user=""
cp "$SCRIPT_PATH/$PROGRAM_BATCH" "$BIN_DIR" local group=""
if [ $? != 0 ]; then
QuickLogger "Cannot copy $PROGRAM_BATCH to [$BIN_DIR]." if ([ "$USER" != "" ] && [ "$FAKEROOT" == "" ]); then
else user="$USER"
chmod 755 "$BIN_DIR/$PROGRAM_BATCH"
QuickLogger "Copied $PROGRAM_BATCH to [$BIN_DIR]."
fi fi
if ([ "$GROUP" != "" ] && [ "$FAKEROOT" == "" ]); then
group="$GROUP"
fi fi
if [ -f "$SCRIPT_PATH/ssh_filter.sh" ]; then for file in "${binFiles[@]}"; do
cp "$SCRIPT_PATH/ssh_filter.sh" "$BIN_DIR" CopyFile "$SCRIPT_PATH" "$BIN_DIR" "$file" "$file" 755 "$user" "$group" true
if [ $? != 0 ]; then done
QuickLogger "Cannot copy ssh_filter.sh to [$BIN_DIR]."
else
chmod 755 "$BIN_DIR/ssh_filter.sh"
if ([ "$USER" != "" ] && [ "$GROUP" != "" ] && [ "$FAKEROOT" == "" ]); then
chown $USER:$GROUP "$BIN_DIR/ssh_filter.sh"
fi
QuickLogger "Copied ssh_filter.sh to [$BIN_DIR]."
fi
fi
} }
function CopyServiceFiles { function CopyServiceFiles {
# OSYNC SPECIFIC if ([ "$init" == "systemd" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_SYSTEMD_SYSTEM" ]); then
if ([ "$init" == "systemd" ] && [ -f "$SCRIPT_PATH/$OSYNC_SERVICE_FILE_SYSTEMD_SYSTEM" ]); then CreateDir "$SERVICE_DIR_SYSTEMD_SYSTEM"
cp "$SCRIPT_PATH/$OSYNC_SERVICE_FILE_SYSTEMD_SYSTEM" "$SERVICE_DIR_SYSTEMD_SYSTEM" && cp "$SCRIPT_PATH/$OSYNC_SERVICE_FILE_SYSTEMD_USER" "$SERVICE_DIR_SYSTEMD_USER/$SERVICE_FILE_SYSTEMD_SYSTEM" CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_SYSTEM" "$SERVICE_FILE_SYSTEMD_SYSTEM" "$SERVICE_FILE_SYSTEMD_SYSTEM" "" "" "" true
if [ $? != 0 ]; then if [ -f "$SCRIPT_PATH/$SERVICE_FILE_SYSTEMD_USER" ]; then
QuickLogger "Cannot copy the systemd file to [$SERVICE_DIR_SYSTEMD_SYSTEM] or [$SERVICE_DIR_SYSTEMD_USER]." CreateDir "$SERVICE_DIR_SYSTEMD_USER"
else CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_USER" "$SERVICE_FILE_SYSTEMD_USER" "$SERVICE_FILE_SYSTEMD_USER" "" "" "" true
QuickLogger "Created osync-srv service in [$SERVICE_DIR_SYSTEMD_SYSTEM] and [$SERVICE_DIR_SYSTEMD_USER]."
QuickLogger "Can be activated with [systemctl start osync-srv@instance.conf] where instance.conf is the name of the config file in $CONF_DIR."
QuickLogger "Can be enabled on boot with [systemctl enable osync-srv@instance.conf]."
QuickLogger "In userland, active with [systemctl --user start osync-srv@instance.conf]."
fi
elif ([ "$init" == "initV" ] && [ -f "$SCRIPT_PATH/$OSYNC_SERVICE_FILE_INIT" ]); then
cp "$SCRIPT_PATH/$OSYNC_SERVICE_FILE_INIT" "$SERVICE_DIR_INIT"
if [ $? != 0 ]; then
QuickLogger "Cannot copy osync-srv to [$SERVICE_DIR_INIT]."
else
chmod 755 "$SERVICE_DIR_INIT/$OSYNC_SERVICE_FILE_INIT"
QuickLogger "Created osync-srv service in [$SERVICE_DIR_INIT]."
QuickLogger "Can be activated with [service $OSYNC_SERVICE_FILE_INIT start]."
QuickLogger "Can be enabled on boot with [chkconfig $OSYNC_SERVICE_FILE_INIT on]."
fi
fi fi
# PMOCR SPECIFIC if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" ]; then
if ([ "$init" == "systemd" ] && [ -f "$SCRIPT_PATH/$PMOCR_SERVICE_FILE_SYSTEMD_SYSTEM" ]); then CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_SYSTEM" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM" "" "" "" true
cp "$SCRIPT_PATH/$PMOCR_SERVICE_FILE_SYSTEMD_SYSTEM" "$SERVICE_DIR_SYSTEMD_SYSTEM" Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
if [ $? != 0 ]; then
QuickLogger "Cannot copy the systemd file to [$SERVICE_DIR_SYSTEMD_SYSTEM] or [$SERVICE_DIR_SYSTEMD_USER]."
else
QuickLogger "Created pmocr-srv service in [$SERVICE_DIR_SYSTEMD_SYSTEM] and [$SERVICE_DIR_SYSTEMD_USER]."
QuickLogger "Can be activated with [systemctl start pmocr-srv@default.conf] where default.conf is the name of the config file in $CONF_DIR."
QuickLogger "Can be enabled on boot with [systemctl enable pmocr-srv@default.conf]."
fi fi
elif ([ "$init" == "initV" ] && [ -f "$SCRIPT_PATH/$PMOCR_SERVICE_FILE_INIT" ]); then if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" ]; then
cp "$SCRIPT_PATH/$PMOCR_SERVICE_FILE_INIT" "$SERVICE_DIR_INIT" CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_SYSTEMD_USER" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" "$TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER" "" "" "" true
if [ $? != 0 ]; then
QuickLogger "Cannot copy pmoct-srv to [$SERVICE_DIR_INIT]."
else
chmod 755 "$SERVICE_DIR_INIT/$PMOCR_SERVICE_FILE_INIT"
QuickLogger "Created osync-srv service in [$SERVICE_DIR_INIT]."
QuickLogger "Can be activated with [service $PMOCR_SERVICE_FILE_INIT start]."
QuickLogger "Can be enabled on boot with [chkconfig $PMOCR_SERVICE_FILE_INIT on]."
fi fi
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_SYSTEMD_SYSTEM] and [$SERVICE_DIR_SYSTEMD_USER]." "NOTICE"
Logger "Can be activated with [systemctl start SERVICE_NAME@instance.conf] where instance.conf is the name of the config file in $CONF_DIR." "NOTICE"
Logger "Can be enabled on boot with [systemctl enable $SERVICE_NAME@instance.conf]." "NOTICE"
Logger "In userland, active with [systemctl --user start $SERVICE_NAME@instance.conf]." "NOTICE"
elif ([ "$init" == "initV" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_INIT" ] && [ -d "$SERVICE_DIR_INIT" ]); then
#CreateDir "$SERVICE_DIR_INIT"
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_INIT" "$SERVICE_FILE_INIT" "$SERVICE_FILE_INIT" "755" "" "" true
if [ -f "$SCRIPT_PATH/$TARGET_HELPER_SERVICE_FILE_INIT" ]; then
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_INIT" "$TARGET_HELPER_SERVICE_FILE_INIT" "$TARGET_HELPER_SERVICE_FILE_INIT" "755" "" "" true
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
fi
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_INIT]." "NOTICE"
Logger "Can be activated with [service $SERVICE_FILE_INIT start]." "NOTICE"
Logger "Can be enabled on boot with [chkconfig $SERVICE_FILE_INIT on]." "NOTICE"
elif ([ "$init" == "openrc" ] && [ -f "$SCRIPT_PATH/$SERVICE_FILE_OPENRC" ] && [ -d "$SERVICE_DIR_OPENRC" ]); then
# Rename service to usual service file
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_OPENRC" "$SERVICE_FILE_OPENRC" "$SERVICE_FILE_INIT" "755" "" "" true
if [ -f "$SCRPT_PATH/$TARGET_HELPER_SERVICE_FILE_OPENRC" ]; then
CopyFile "$SCRIPT_PATH" "$SERVICE_DIR_OPENRC" "$TARGET_HELPER_SERVICE_FILE_OPENRC" "$TARGET_HELPER_SERVICE_FILE_OPENRC" "755" "" "" true
Logger "Created optional service [$TARGET_HELPER_SERVICE_NAME] with same specifications as below." "NOTICE"
fi
Logger "Created [$SERVICE_NAME] service in [$SERVICE_DIR_OPENRC]." "NOTICE"
Logger "Can be activated with [rc-update add $SERVICE_NAME.instance] where instance is a configuration file found in /etc/osync." "NOTICE"
else
Logger "Cannot properly find how to deal with init on this system. Skipping service file installation." "NOTICE"
fi fi
} }
@ -230,54 +290,161 @@ function Statistics {
fi fi
fi fi
QuickLogger "Neiter wget nor curl could be used for. Cannot run statistics. Use the provided link please." Logger "Neiter wget nor curl could be used for. Cannot run statistics. Use the provided link please." "WARN"
return 1 return 1
} }
function RemoveFile {
local file="${1}"
if [ -f "$file" ]; then
rm -f "$file"
if [ $? != 0 ]; then
Logger "Could not remove file [$file]." "ERROR"
else
Logger "Removed file [$file]." "NOTICE"
fi
else
Logger "File [$file] not found. Skipping." "NOTICE"
fi
}
function RemoveAll {
RemoveFile "$BIN_DIR/$PROGRAM_BINARY"
if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "obackup" ]; then
RemoveFile "$BIN_DIR/$PROGRAM_BATCH"
fi
if [ ! -f "$BIN_DIR/osync.sh" ] && [ ! -f "$BIN_DIR/obackup.sh" ]; then # Check if any other program requiring ssh filter is present before removal
RemoveFile "$BIN_DIR/$SSH_FILTER"
else
Logger "Skipping removal of [$BIN_DIR/$SSH_FILTER] because other programs present that need it." "NOTICE"
fi
RemoveFile "$SERVICE_DIR_SYSTEMD_SYSTEM/$SERVICE_FILE_SYSTEMD_SYSTEM"
RemoveFile "$SERVICE_DIR_SYSTEMD_USER/$SERVICE_FILE_SYSTEMD_USER"
RemoveFile "$SERVICE_DIR_INIT/$SERVICE_FILE_INIT"
RemoveFile "$TARGET_HELPER_SERVICE_DIR_SYSTEMD_SYSTEM/$SERVICE_FILE_SYSTEMD_SYSTEM"
RemoveFile "$TARGET_HELPER_SERVICE_DIR_SYSTEMD_USER/$SERVICE_FILE_SYSTEMD_USER"
RemoveFile "$TARGET_HELPER_SERVICE_DIR_INIT/$SERVICE_FILE_INIT"
Logger "Skipping configuration files in [$CONF_DIR]. You may remove this directory manually." "NOTICE"
}
function Usage { function Usage {
echo "Installs $PROGRAM into $BIN_DIR" echo "Installs $PROGRAM into $BIN_DIR"
echo "options:" echo "options:"
echo "--silent Will log and bypass user interaction." echo "--silent Will log and bypass user interaction."
echo "--no-stats Used with --silent in order to refuse sending anonymous install stats." echo "--no-stats Used with --silent in order to refuse sending anonymous install stats."
echo "--remove Remove the program."
echo "--prefix=/path Use prefix to install path."
exit 127 exit 127
} }
_LOGGER_SILENT=false ############################## Script entry point
_STATS=1
for i in "$@" function GetCommandlineArguments {
do for i in "$@"; do
case $i in case $i in
--prefix=*)
FAKEROOT="${i##*=}"
;;
--silent) --silent)
_LOGGER_SILENT=true _LOGGER_SILENT=true
;; ;;
--no-stats) --no-stats)
_STATS=0 _STATS=0
;; ;;
--remove)
ACTION="uninstall"
;;
--help|-h|-?) --help|-h|-?)
Usage Usage
;;
*)
Logger "Unknown option '$i'" "ERROR"
Usage
exit
;;
esac esac
done done
}
if [ "$FAKEROOT" != "" ]; then GetCommandlineArguments "$@"
mkdir -p "$SERVICE_DIR_SYSTEMD_SYSTEM" "$SERVICE_DIR_SYSTEMD_USER" "$BIN_DIR"
CONF_DIR=$FAKEROOT/etc/$PROGRAM
BIN_DIR="$FAKEROOT/usr/local/bin"
SERVICE_DIR_INIT=$FAKEROOT/etc/init.d
# Should be /usr/lib/systemd/system, but /lib/systemd/system exists on debian & rhel / fedora
SERVICE_DIR_SYSTEMD_SYSTEM=$FAKEROOT/lib/systemd/system
SERVICE_DIR_SYSTEMD_USER=$FAKEROOT/etc/systemd/user
SERVICE_DIR_OPENRC=$FAKEROOT/etc/init.d
if [ "$PROGRAM" == "osync" ]; then
SERVICE_NAME="osync-srv"
TARGET_HELPER_SERVICE_NAME="osync-target-helper-srv"
TARGET_HELPER_SERVICE_FILE_INIT="$TARGET_HELPER_SERVICE_NAME"
TARGET_HELPER_SERVICE_FILE_SYSTEMD_SYSTEM="$TARGET_HELPER_SERVICE_NAME@.service"
TARGET_HELPER_SERVICE_FILE_SYSTEMD_USER="$TARGET_HELPER_SERVICE_NAME@.service.user"
TARGET_HELPER_SERVICE_FILE_OPENRC="$TARGET_HELPER_SERVICE_NAME-openrc"
elif [ "$PROGRAM" == "pmocr" ]; then
SERVICE_NAME="pmocr-srv"
fi fi
SERVICE_FILE_INIT="$SERVICE_NAME"
SERVICE_FILE_SYSTEMD_SYSTEM="$SERVICE_NAME@.service"
SERVICE_FILE_SYSTEMD_USER="$SERVICE_NAME@.service.user"
SERVICE_FILE_OPENRC="$SERVICE_NAME-openrc"
## Generic code
trap GenericTrapQuit TERM EXIT HUP QUIT
if [ ! -w "$(dirname $LOG_FILE)" ]; then
echo "Cannot write to log [$(dirname $LOG_FILE)]."
else
Logger "Script begin, logging to [$LOG_FILE]." "DEBUG"
fi
# Set default umask
umask 0022
GetLocalOS GetLocalOS
SetLocalOSSettings SetLocalOSSettings
CreateConfDir # On Mac OS this always produces a warning which causes the installer to fail with exit code 2
# Since we know it won't work anyway, and that's fine, just skip this step
if $DO_INIT; then
GetInit
fi
STATS_LINK="http://instcount.netpower.fr?program=$PROGRAM&version=$PROGRAM_VERSION&os=$OS&action=$ACTION"
if [ "$ACTION" == "uninstall" ]; then
RemoveAll
Logger "$PROGRAM uninstalled." "NOTICE"
else
CreateDir "$CONF_DIR"
CreateDir "$BIN_DIR"
CopyExampleFiles CopyExampleFiles
CopyProgram CopyProgram
GetInit if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "pmocr" ]; then
CopyServiceFiles CopyServiceFiles
fi
Logger "$PROGRAM installed. Use with $BIN_DIR/$PROGRAM_BINARY" "NOTICE"
if [ "$PROGRAM" == "osync" ] || [ "$PROGRAM" == "obackup" ]; then
echo ""
Logger "If connecting remotely, consider setup ssh filter to enhance security." "NOTICE"
echo ""
fi
fi
STATS_LINK="http://instcount.netpower.fr?program=$PROGRAM&version=$PROGRAM_VERSION&os=$OS"
QuickLogger "$PROGRAM installed. Use with $BIN_DIR/$PROGRAM"
if [ $_STATS -eq 1 ]; then if [ $_STATS -eq 1 ]; then
if [ $_LOGGER_SILENT == true ]; then if [ $_LOGGER_SILENT == true ]; then
Statistics Statistics
else else
QuickLogger "In order to make install statistics, the script would like to connect to $STATS_LINK" Logger "In order to make usage statistics, the script would like to connect to $STATS_LINK" "NOTICE"
read -r -p "No data except those in the url will be send. Allow [Y/n] " response read -r -p "No data except those in the url will be send. Allow [Y/n] " response
case $response in case $response in
[nN]) [nN])

File diff suppressed because it is too large Load Diff

View File

@ -1,36 +1,38 @@
#!/usr/bin/env bash #!/usr/bin/env bash
## MERGE 2016121901 ## MERGE 2020031501
## Merges ofunctions.sh and n_program.sh into program.sh ## Merges ofunctions.sh and n_program.sh into program.sh
## Adds installer ## Adds installer
function __PREPROCESSOR_Merge { PROGRAM=merge
PROGRAM=obackup INSTANCE_ID=dev
VERSION=$(grep "PROGRAM_VERSION=" n_$PROGRAM.sh)
VERSION=${VERSION#*=}
function Usage {
echo "Merges ofunctions.sh and n_program.sh into debug_program.sh and ../program.sh"
echo "Usage"
echo "$0 osync|obackup|pmocr"
}
function __PREPROCESSOR_Merge {
local nPROGRAM="$1"
if [ -f "$nPROGRAM" ]; then
Logger "$nPROGRAM is not found in local path." "CRITICAL"
exit 1
fi
VERSION=$(grep "PROGRAM_VERSION=" n_$nPROGRAM.sh)
VERSION=${VERSION#*=}
__PREPROCESSOR_Constants __PREPROCESSOR_Constants
source "ofunctions.sh" __PREPROCESSOR_Unexpand "n_$nPROGRAM.sh" "debug_$nPROGRAM.sh"
if [ $? != 0 ]; then
echo "Please run $0 in dev directory with ofunctions.sh"
exit 1
fi
__PREPROCESSOR_Unexpand "n_$PROGRAM.sh" "debug_$PROGRAM.sh"
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "debug_$PROGRAM.sh" __PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "debug_$nPROGRAM.sh"
done done
__PREPROCESSOR_CleanDebug __PREPROCESSOR_CleanDebug "debug_$nPROGRAM.sh" "../$nPROGRAM.sh"
__PREPROCESSOR_CopyCommons
rm -f tmp_$PROGRAM.sh
if [ $? != 0 ]; then
QuickLogger "Cannot remove tmp_$PROGRAM.sh"
exit 1
fi
} }
function __PREPROCESSOR_Constants { function __PREPROCESSOR_Constants {
@ -41,16 +43,26 @@ function __PREPROCESSOR_Constants {
__PREPROCESSOR_SUBSETS=( __PREPROCESSOR_SUBSETS=(
'#### OFUNCTIONS FULL SUBSET ####' '#### OFUNCTIONS FULL SUBSET ####'
'#### OFUNCTIONS MINI SUBSET ####' '#### OFUNCTIONS MINI SUBSET ####'
'#### OFUNCTIONS MICRO SUBSET ####'
'#### PoorMansRandomGenerator SUBSET ####'
'#### _OFUNCTIONS_BOOTSTRAP SUBSET ####' '#### _OFUNCTIONS_BOOTSTRAP SUBSET ####'
'#### RUN_DIR SUBSET ####'
'#### DEBUG SUBSET ####' '#### DEBUG SUBSET ####'
'#### TrapError SUBSET ####' '#### TrapError SUBSET ####'
'#### RemoteLogger SUBSET ####' '#### RemoteLogger SUBSET ####'
'#### QuickLogger SUBSET ####' '#### Logger SUBSET ####'
'#### GetLocalOS SUBSET ####' '#### GetLocalOS SUBSET ####'
'#### IsInteger SUBSET ####' '#### IsInteger SUBSET ####'
'#### UrlEncode SUBSET ####' '#### UrlEncode SUBSET ####'
'#### HumanToNumeric SUBSET ####' '#### HumanToNumeric SUBSET ####'
'#### ArrayContains SUBSET ####' '#### ArrayContains SUBSET ####'
'#### VerComp SUBSET ####'
'#### GetConfFileValue SUBSET ####'
'#### SetConfFileValue SUBSET ####'
'#### CheckRFC822 SUBSET ####'
'#### CleanUp SUBSET ####'
'#### GenericTrapQuit SUBSET ####'
'#### FileMove SUBSET ####'
) )
} }
@ -60,7 +72,7 @@ function __PREPROCESSOR_Unexpand {
unexpand "$source" > "$destination" unexpand "$source" > "$destination"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot unexpand [$source] to [$destination]." Logger "Cannot unexpand [$source] to [$destination]." "CRITICAL"
exit 1 exit 1
fi fi
} }
@ -73,101 +85,120 @@ function __PREPROCESSOR_MergeSubset {
sed -n "/$subsetBegin/,/$subsetEnd/p" "$subsetFile" > "$subsetFile.$subsetBegin" sed -n "/$subsetBegin/,/$subsetEnd/p" "$subsetFile" > "$subsetFile.$subsetBegin"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot sed subset [$subsetBegin -- $subsetEnd] in [$subsetFile]." Logger "Cannot sed subset [$subsetBegin -- $subsetEnd] in [$subsetFile]." "CRTICIAL"
exit 1 exit 1
fi fi
sed "/include $subsetBegin/r $subsetFile.$subsetBegin" "$mergedFile" | grep -v -E "$subsetBegin\$|$subsetEnd\$" > "$mergedFile.tmp" sed "/include $subsetBegin/r $subsetFile.$subsetBegin" "$mergedFile" | grep -v -E "$subsetBegin\$|$subsetEnd\$" > "$mergedFile.tmp"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot add subset [$subsetBegin] to [$mergedFile]." Logger "Cannot add subset [$subsetBegin] to [$mergedFile]." "CRITICAL"
exit 1 exit 1
fi fi
rm -f "$subsetFile.$subsetBegin" rm -f "$subsetFile.$subsetBegin"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot remove temporary subset [$subsetFile.$subsetBegin]." Logger "Cannot remove temporary subset [$subsetFile.$subsetBegin]." "CRITICAL"
exit 1 exit 1
fi fi
rm -f "$mergedFile" rm -f "$mergedFile"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot remove merged original file [$mergedFile]." Logger "Cannot remove merged original file [$mergedFile]." "CRITICAL"
exit 1 exit 1
fi fi
mv "$mergedFile.tmp" "$mergedFile" mv "$mergedFile.tmp" "$mergedFile"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot move merged tmp file to original [$mergedFile]." Logger "Cannot move merged tmp file to original [$mergedFile]." "CRITICAL"
exit 1 exit 1
fi fi
} }
function __PREPROCESSOR_CleanDebug { function __PREPROCESSOR_CleanDebug {
sed '/'$PARANOIA_DEBUG_BEGIN'/,/'$PARANOIA_DEBUG_END'/d' debug_$PROGRAM.sh | grep -v "$PARANOIA_DEBUG_LINE" > ../$PROGRAM.sh local source="${1}"
local destination="${2:-$source}"
sed '/'$PARANOIA_DEBUG_BEGIN'/,/'$PARANOIA_DEBUG_END'/d' "$source" | grep -v "$PARANOIA_DEBUG_LINE" > "$destination.tmp"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot remove PARANOIA_DEBUG code from standard build." Logger "Cannot remove PARANOIA_DEBUG code from standard build." "CRITICAL"
exit 1 exit 1
else
mv -f "$destination.tmp" "$destination"
if [ $? -ne 0 ]; then
Logger "Cannot move [$destination.tmp] to [$destination]." "CRITICAL"
exit 1
fi
fi fi
chmod +x "debug_$PROGRAM.sh" chmod +x "$source"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot chmod debug_$PROGRAM.sh" Logger "Cannot chmod [$source]." "CRITICAL"
exit 1 exit 1
else else
QuickLogger "Prepared ./debug_$PROGRAM.sh" Logger "Prepared [$source]." "NOTICE"
fi fi
chmod +x "../$PROGRAM.sh"
if [ "$source" != "$destination" ]; then
chmod +x "$destination"
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot chmod $PROGRAM.sh" Logger "Cannot chmod [$destination]." "CRITICAL"
exit 1 exit 1
else else
QuickLogger "Prepared ../$PROGRAM.sh" Logger "Prepared [$destination]." "NOTICE"
fi
fi fi
} }
function __PREPROCESSOR_CopyCommons { function __PREPROCESSOR_CopyCommons {
sed "s/\[prgname\]/$PROGRAM/g" common_install.sh > ../tmp_install.sh local nPROGRAM="$1"
sed "s/\[prgname\]/$nPROGRAM/g" common_install.sh > ../install.sh
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot assemble install." Logger "Cannot assemble install." "CRITICAL"
exit 1 exit 1
fi fi
for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
__PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "../tmp_install.sh" __PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "../install.sh"
done done
sed "s/\[version\]/$VERSION/g" ../tmp_install.sh > ../install.sh __PREPROCESSOR_CleanDebug "../install.sh"
if [ $? != 0 ]; then
QuickLogger "Cannot change install version."
exit 1
fi
if [ -f "common_batch.sh" ]; then if [ -f "common_batch.sh" ]; then
sed "s/\[prgname\]/$PROGRAM/g" common_batch.sh > ../$PROGRAM-batch.sh sed "s/\[prgname\]/$nPROGRAM/g" common_batch.sh > ../$nPROGRAM-batch.sh
if [ $? != 0 ]; then if [ $? != 0 ]; then
QuickLogger "Cannot assemble batch runner." Logger "Cannot assemble batch runner." "CRITICAL"
exit 1 exit 1
fi fi
chmod +x ../$PROGRAM-batch.sh
if [ $? != 0 ]; then for subset in "${__PREPROCESSOR_SUBSETS[@]}"; do
QuickLogger "Cannot chmod $PROGRAM-batch.sh" __PREPROCESSOR_MergeSubset "$subset" "${subset//SUBSET/SUBSET END}" "ofunctions.sh" "../$nPROGRAM-batch.sh"
exit 1 done
else
QuickLogger "Prepared ../$PROGRAM-batch.sh" __PREPROCESSOR_CleanDebug "../$nPROGRAM-batch.sh"
fi
fi
chmod +x ../install.sh
if [ $? != 0 ]; then
QuickLogger "Cannot chmod install.sh"
exit 1
else
QuickLogger "Prepared ../install.sh"
fi
rm -f ../tmp_install.sh
if [ $? != 0 ]; then
QuickLogger "Cannot chmod $PROGRAM.sh"
exit 1
fi fi
} }
# If sourced don't do anything # If sourced don't do anything
if [ "$(basename $0)" == "merge.sh" ]; then if [ "$(basename $0)" == "merge.sh" ]; then
__PREPROCESSOR_Merge source "./ofunctions.sh"
if [ $? != 0 ]; then
echo "Please run $0 in dev directory with ofunctions.sh"
exit 1
fi
trap GenericTrapQuit TERM EXIT HUP QUIT
if [ "$1" == "osync" ]; then
__PREPROCESSOR_Merge osync
__PREPROCESSOR_CopyCommons osync
elif [ "$1" == "obackup" ]; then
__PREPROCESSOR_Merge obackup
__PREPROCESSOR_CopyCommons obackup
elif [ "$1" == "pmocr" ]; then
__PREPROCESSOR_Merge pmocr
__PREPROCESSOR_CopyCommons pmocr
else
echo "No valid program given."
Usage
exit 1
fi
fi fi

614
dev/n_obackup.sh Executable file → Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -4,5 +4,7 @@
#SC1091 = not following source #SC1091 = not following source
#SC2086 = quoting errors (shellcheck is way too picky about quoting) #SC2086 = quoting errors (shellcheck is way too picky about quoting)
#SC2120 = only for debug version #SC2120 = only for debug version
#SC2034 = unused variabled (can be ignored in ofunctions.sh)
#SC2068 = bad array usage (can be ignored in ofunctions.sh)
shellcheck -e SC1090,SC1091,SC2086,SC2119,SC2120 $1 shellcheck -e SC1090,SC1091,SC2086,SC2119,SC2120 $@

View File

@ -2,9 +2,9 @@
###### obackup - Local or Remote, push or pull backup script for files & mysql databases ###### obackup - Local or Remote, push or pull backup script for files & mysql databases
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr) ###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
###### obackup v2.1x config file rev 2016081701
###### GENERAL BACKUP OPTIONS [GENERAL]
CONFIG_FILE_REVISION=2.1
## Backup identification string. ## Backup identification string.
INSTANCE_ID="local-test" INSTANCE_ID="local-test"
@ -13,27 +13,29 @@ INSTANCE_ID="local-test"
LOGFILE="" LOGFILE=""
## Elements to backup ## Elements to backup
SQL_BACKUP=yes SQL_BACKUP=no
FILE_BACKUP=yes FILE_BACKUP=yes
## Backups can be done local, pulled from another server or pushed to a backup server. Available options are [local,pull,push]. ## Backups can be done local, pulled from another server or pushed to a backup server. Available options are [local,pull,push].
## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server. ## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server.
BACKUP_TYPE=local BACKUP_TYPE=local
###### BACKUP STORAGE [BACKUP STORAGE]
## Storage paths of the backups (absolute paths of the local or remote system) ## Storage paths of the backups (absolute paths of the local or remote system)
SQL_STORAGE="${HOME}/obackup-storage/sql-local" SQL_STORAGE="${HOME}/obackup-storage/sql-local"
FILE_STORAGE="${HOME}/obackup-storage/files-local" FILE_STORAGE="${HOME}/obackup-storage/files-local"
## Encryption ## Encryption
ENCRYPTION=yes ENCRYPTION=no
## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system) ## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system)
CRYPT_STORAGE="${HOME}/obackup-storage/crypt-local" CRYPT_STORAGE="${HOME}/obackup-storage/crypt-local"
## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys ## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys
GPG_RECIPIENT="John Doe" GPG_RECIPIENT="John Doe"
PARALLEL_ENCRYPTION_PROCESSES=""
## Create backup directories if they do not exist ## Create backup directories if they do not exist
CREATE_DIRS=yes CREATE_DIRS=yes
@ -53,13 +55,17 @@ GET_BACKUP_SIZE=yes
SQL_WARN_MIN_SPACE=1048576 SQL_WARN_MIN_SPACE=1048576
FILE_WARN_MIN_SPACE=1048576 FILE_WARN_MIN_SPACE=1048576
###### REMOTE ONLY OPTIONS [REMOTE_OPTIONS]
## In case of pulled or pushed backups, remote system URI needs to be supplied. ## In case of pulled or pushed backups, remote system URI needs to be supplied.
REMOTE_SYSTEM_URI="ssh://backupuser@remote.system.tld:22/" REMOTE_SYSTEM_URI="ssh://backupuser@remote.system.tld:22/"
## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information. ## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa" SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa"
SSH_PASSWORD_FILE=""
_REMOTE_TOKEN="SomeAlphaNumericToken9"
## ssh compression should be used unless your remote connection is good enough (LAN) ## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes SSH_COMPRESSION=yes
@ -79,7 +85,7 @@ REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled. ## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled.
SUDO_EXEC=no SUDO_EXEC=no
###### DATABASE SPECIFIC OPTIONS [DATABASE BACKUP SETTINGS]
## Database backup user ## Database backup user
SQL_USER=root SQL_USER=root
@ -87,9 +93,9 @@ SQL_USER=root
## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list. ## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list.
## Every found database will be backed up as separate backup task. ## Every found database will be backed up as separate backup task.
DATABASES_ALL=yes DATABASES_ALL=yes
DATABASES_ALL_EXCLUDE_LIST="test;information_schema;zarafa_prod" DATABASES_ALL_EXCLUDE_LIST="test;information_schema;kopano_prod"
## Alternatively, if DATABASES_ALL=no, you can specify a list of databases to backup separated by spaces. ## Alternatively, if DATABASES_ALL=no, you can specify a list of databases to backup separated by spaces.
DATABASES_LIST="mysql" DATABASES_LIST=mysql
## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task. ## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task.
## If a task gets stopped, next one in the task list gets executed. Time is specified in seconds. ## If a task gets stopped, next one in the task list gets executed. Time is specified in seconds.
@ -104,7 +110,7 @@ MYSQLDUMP_OPTIONS="--opt --single-transaction"
## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable. ## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable.
COMPRESSION_LEVEL=3 COMPRESSION_LEVEL=3
###### FILES SPECIFIC OPTIONS [FILE BACKUP SETTINGS]
## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task. ## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task.
## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task. ## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task.
@ -112,8 +118,8 @@ COMPRESSION_LEVEL=3
## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST. ## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST.
## Directories backup list. List of semicolon separated directories that will be backed up. ## Directories backup list. List of semicolon separated directories that will be backed up.
DIRECTORY_LIST="${HOME}/obackup-testdata/testData" DIRECTORY_LIST=/root/obackup-testdata/nonPresentData
RECURSIVE_DIRECTORY_LIST="${HOME}/obackup-testdata/testDataRecursive" RECURSIVE_DIRECTORY_LIST=/root/obackup-testdata/nonPresentDataRecursive
RECURSIVE_EXCLUDE_LIST="${HOME}/obackup-testdata/testDataRecursive/Excluded" RECURSIVE_EXCLUDE_LIST="${HOME}/obackup-testdata/testDataRecursive/Excluded"
## Rsync exclude / include order (the option set here will be set first, eg: include will make include then exclude patterns) ## Rsync exclude / include order (the option set here will be set first, eg: include will make include then exclude patterns)
@ -134,6 +140,8 @@ RSYNC_EXCLUDE_FROM=""
## List separator char. You may set an alternative separator char for your directories lists above. ## List separator char. You may set an alternative separator char for your directories lists above.
PATH_SEPARATOR_CHAR=";" PATH_SEPARATOR_CHAR=";"
RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions ## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes PRESERVE_PERMISSIONS=yes
@ -177,10 +185,12 @@ BANDWIDTH=0
## Paranoia option. Don't change this unless you read the documentation. ## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync RSYNC_EXECUTABLE=rsync
###### ALERT OPTIONS [ALERT_OPTIONS]
## Alert email addresses separated by a space character ## Alert email addresses separated by a space character
DESTINATION_MAILS="" DESTINATION_MAILS=""
MAIL_BODY_CHARSET=""
## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/ ## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/
SENDER_MAIL="alert@your.system.tld" SENDER_MAIL="alert@your.system.tld"
@ -191,7 +201,7 @@ SMTP_ENCRYPTION=none
SMTP_USER= SMTP_USER=
SMTP_PASSWORD= SMTP_PASSWORD=
###### GENERAL BACKUP OPTIONS [BACKUP SETTINGS]
## Max execution time of whole backup process. Soft max exec time generates a warning only. ## Max execution time of whole backup process. Soft max exec time generates a warning only.
## Hard max exec time generates a warning and stops the whole backup execution. ## Hard max exec time generates a warning and stops the whole backup execution.
@ -207,7 +217,7 @@ ROTATE_SQL_COPIES=7
ROTATE_FILE_BACKUPS=yes ROTATE_FILE_BACKUPS=yes
ROTATE_FILE_COPIES=7 ROTATE_FILE_COPIES=7
###### EXECUTION HOOKS [EXECUTION_HOOKS]
## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set). ## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set).
## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data. ## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data.

View File

@ -2,9 +2,9 @@
###### obackup - Local or Remote, push or pull backup script for files & mysql databases ###### obackup - Local or Remote, push or pull backup script for files & mysql databases
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr) ###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
###### obackup v2.1x config file rev 2016081701
###### GENERAL BACKUP OPTIONS [GENERAL]
CONFIG_FILE_REVISION=2.1
## Backup identification string. ## Backup identification string.
INSTANCE_ID="local-test" INSTANCE_ID="local-test"
@ -20,7 +20,7 @@ FILE_BACKUP=yes
## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server. ## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server.
BACKUP_TYPE=local BACKUP_TYPE=local
###### BACKUP STORAGE [BACKUP STORAGE]
## Storage paths of the backups (absolute paths of the local or remote system) ## Storage paths of the backups (absolute paths of the local or remote system)
SQL_STORAGE="${HOME}/obackup-storage/sql" SQL_STORAGE="${HOME}/obackup-storage/sql"
@ -28,6 +28,12 @@ FILE_STORAGE="${HOME}/obackup-storage/files"
## Backup encryption using GPG and duplicity. Feature not ready yet. ## Backup encryption using GPG and duplicity. Feature not ready yet.
ENCRYPTION=no ENCRYPTION=no
CRYPT_STORAGE="/home/storage/crypt"
GPG_RECIPIENT="Your Name used with GPG signature"
PARALLEL_ENCRYPTION_PROCESSES=""
## Create backup directories if they do not exist ## Create backup directories if they do not exist
CREATE_DIRS=yes CREATE_DIRS=yes
@ -47,13 +53,17 @@ GET_BACKUP_SIZE=yes
SQL_WARN_MIN_SPACE=1048576 SQL_WARN_MIN_SPACE=1048576
FILE_WARN_MIN_SPACE=1048576 FILE_WARN_MIN_SPACE=1048576
###### REMOTE ONLY OPTIONS [REMOTE_OPTIONS]
## In case of pulled or pushed backups, remote system URI needs to be supplied. ## In case of pulled or pushed backups, remote system URI needs to be supplied.
REMOTE_SYSTEM_URI="ssh://backupuser@remote.system.tld:22/" REMOTE_SYSTEM_URI="ssh://backupuser@remote.system.tld:22/"
## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information. ## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa" SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa"
SSH_PASSWORD_FILE=""
_REMOTE_TOKEN="SomeAlphaNumericToken9"
## ssh compression should be used unless your remote connection is good enough (LAN) ## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes SSH_COMPRESSION=yes
@ -73,7 +83,7 @@ REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled. ## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled.
SUDO_EXEC=no SUDO_EXEC=no
###### DATABASE SPECIFIC OPTIONS [DATABASE BACKUP SETTINGS]
## Database backup user ## Database backup user
SQL_USER=root SQL_USER=root
@ -98,7 +108,7 @@ MYSQLDUMP_OPTIONS="--opt --single-transaction"
## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable. ## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable.
COMPRESSION_LEVEL=3 COMPRESSION_LEVEL=3
###### FILES SPECIFIC OPTIONS [FILE BACKUP SETTINGS]
## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task. ## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task.
## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task. ## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task.
@ -128,6 +138,8 @@ RSYNC_EXCLUDE_FROM=""
## List separator char. You may set an alternative separator char for your directories lists above. ## List separator char. You may set an alternative separator char for your directories lists above.
PATH_SEPARATOR_CHAR=";" PATH_SEPARATOR_CHAR=";"
RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions ## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes PRESERVE_PERMISSIONS=yes
@ -171,10 +183,12 @@ BANDWIDTH=0
## Paranoia option. Don't change this unless you read the documentation. ## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync RSYNC_EXECUTABLE=rsync
###### ALERT OPTIONS [ALERT_OPTIONS]
## Alert email addresses separated by a space character ## Alert email addresses separated by a space character
DESTINATION_MAILS="" DESTINATION_MAILS=""
MAIL_BODY_CHARSET=""
## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/ ## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/
SENDER_MAIL="alert@your.system.tld" SENDER_MAIL="alert@your.system.tld"
@ -185,7 +199,7 @@ SMTP_ENCRYPTION=none
SMTP_USER= SMTP_USER=
SMTP_PASSWORD= SMTP_PASSWORD=
###### GENERAL BACKUP OPTIONS [BACKUP SETTINGS]
## Max execution time of whole backup process. Soft max exec time generates a warning only. ## Max execution time of whole backup process. Soft max exec time generates a warning only.
## Hard max exec time generates a warning and stops the whole backup execution. ## Hard max exec time generates a warning and stops the whole backup execution.
@ -201,7 +215,7 @@ ROTATE_SQL_COPIES=7
ROTATE_FILE_BACKUPS=no ROTATE_FILE_BACKUPS=no
ROTATE_FILE_COPIES=7 ROTATE_FILE_COPIES=7
###### EXECUTION HOOKS [EXECUTION_HOOKS]
## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set). ## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set).
## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data. ## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data.

View File

@ -29,7 +29,7 @@ RSYNC_EXECUTABLE=rsync
## Remote options (will make backups of remote system through ssh tunnel, public RSA key need to be put into /home/.ssh/authorized_keys in remote users home directory) ## Remote options (will make backups of remote system through ssh tunnel, public RSA key need to be put into /home/.ssh/authorized_keys in remote users home directory)
REMOTE_BACKUP=yes REMOTE_BACKUP=yes
SSH_RSA_PRIVATE_KEY=${HOME}/.ssh/id_rsa_local SSH_RSA_PRIVATE_KEY=${HOME}/.ssh/id_rsa_local_obackup_tests
REMOTE_USER=root REMOTE_USER=root
REMOTE_HOST=localhost REMOTE_HOST=localhost
REMOTE_PORT=22 REMOTE_PORT=22
@ -44,7 +44,7 @@ REMOTE_3RD_PARTY_HOST="www.kernel.org www.google.com"
SQL_USER=root SQL_USER=root
## Save all databases except the ones specified in the exlude list. Every found database will be backed up as separate task (see documentation for explanation about tasks) ## Save all databases except the ones specified in the exlude list. Every found database will be backed up as separate task (see documentation for explanation about tasks)
DATABASES_ALL=yes DATABASES_ALL=yes
DATABASES_ALL_EXCLUDE_LIST="test;information_schema;zarafa_prod" DATABASES_ALL_EXCLUDE_LIST="test;information_schema;kopano_prod"
# Alternatively, you can specifiy a manual list of databases to backup separated by spaces # Alternatively, you can specifiy a manual list of databases to backup separated by spaces
DATABASES_LIST="" DATABASES_LIST=""
## Max backup execution time per DB task. Soft is warning only. Hard is warning, stopping backup task and processing next one. Time is specified in seconds ## Max backup execution time per DB task. Soft is warning only. Hard is warning, stopping backup task and processing next one. Time is specified in seconds

View File

@ -2,9 +2,9 @@
###### obackup - Local or Remote, push or pull backup script for files & mysql databases ###### obackup - Local or Remote, push or pull backup script for files & mysql databases
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr) ###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
###### obackup v2.1x config file rev 2016081701
###### GENERAL BACKUP OPTIONS [GENERAL]
CONFIG_FILE_REVISION=2.1
## Backup identification string. ## Backup identification string.
INSTANCE_ID="pull-test" INSTANCE_ID="pull-test"
@ -20,20 +20,22 @@ FILE_BACKUP=yes
## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server. ## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server.
BACKUP_TYPE=pull BACKUP_TYPE=pull
###### BACKUP STORAGE [BACKUP STORAGE]
## Storage paths of the backups (absolute paths of the local or remote system) ## Storage paths of the backups (absolute paths of the local or remote system)
SQL_STORAGE="${HOME}/obackup-storage/sql-pull" SQL_STORAGE="${HOME}/obackup-storage/sql-pull"
FILE_STORAGE="${HOME}/obackup-storage/files-pull" FILE_STORAGE="${HOME}/obackup-storage/files-pull"
## Encryption ## Encryption
ENCRYPTION=yes ENCRYPTION=no
## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system) ## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system)
CRYPT_STORAGE="${HOME}/obackup-storage/crypt-pull" CRYPT_STORAGE="${HOME}/obackup-storage/crypt-pull"
## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys ## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys
GPG_RECIPIENT="John Doe" GPG_RECIPIENT="John Doe"
PARALLEL_ENCRYPTION_PROCESSES=""
## Create backup directories if they do not exist ## Create backup directories if they do not exist
CREATE_DIRS=yes CREATE_DIRS=yes
@ -53,13 +55,18 @@ GET_BACKUP_SIZE=yes
SQL_WARN_MIN_SPACE=1048576 SQL_WARN_MIN_SPACE=1048576
FILE_WARN_MIN_SPACE=1048576 FILE_WARN_MIN_SPACE=1048576
###### REMOTE ONLY OPTIONS [REMOTE_OPTIONS]
## In case of pulled or pushed backups, remote system URI needs to be supplied. ## In case of pulled or pushed backups, remote system URI needs to be supplied.
REMOTE_SYSTEM_URI="ssh://root@localhost:49999/" REMOTE_SYSTEM_URI="ssh://root@localhost:22/"
## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information. ## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local" SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local_obackup_tests"
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
SSH_PASSWORD_FILE=""
_REMOTE_TOKEN=SomeAlphaNumericToken9
## ssh compression should be used unless your remote connection is good enough (LAN) ## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes SSH_COMPRESSION=yes
@ -79,7 +86,7 @@ REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled. ## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled.
SUDO_EXEC=no SUDO_EXEC=no
###### DATABASE SPECIFIC OPTIONS [DATABASE BACKUP SETTINGS]
## Database backup user ## Database backup user
SQL_USER=root SQL_USER=root
@ -87,9 +94,9 @@ SQL_USER=root
## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list. ## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list.
## Every found database will be backed up as separate backup task. ## Every found database will be backed up as separate backup task.
DATABASES_ALL=yes DATABASES_ALL=yes
DATABASES_ALL_EXCLUDE_LIST="test;information_schema;zarafa_prod" DATABASES_ALL_EXCLUDE_LIST="test;information_schema;kopano_prod"
## Alternatively, if DATABASES_ALL=no, you can specify a list of databases to backup separated by spaces. ## Alternatively, if DATABASES_ALL=no, you can specify a list of databases to backup separated by spaces.
DATABASES_LIST="mysql" DATABASES_LIST=mysql
## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task. ## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task.
## If a task gets stopped, next one in the task list gets executed. Time is specified in seconds. ## If a task gets stopped, next one in the task list gets executed. Time is specified in seconds.
@ -104,7 +111,7 @@ MYSQLDUMP_OPTIONS="--opt --single-transaction"
## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable. ## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable.
COMPRESSION_LEVEL=3 COMPRESSION_LEVEL=3
###### FILES SPECIFIC OPTIONS [FILE BACKUP SETTINGS]
## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task. ## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task.
## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task. ## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task.
@ -112,8 +119,8 @@ COMPRESSION_LEVEL=3
## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST. ## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST.
## Directories backup list. List of semicolon separated directories that will be backed up. ## Directories backup list. List of semicolon separated directories that will be backed up.
DIRECTORY_LIST="${HOME}/obackup-testdata/testData" DIRECTORY_LIST=/root/obackup-testdata/testData
RECURSIVE_DIRECTORY_LIST="${HOME}/obackup-testdata/testDataRecursive" RECURSIVE_DIRECTORY_LIST=/root/obackup-testdata/testDataRecursive
RECURSIVE_EXCLUDE_LIST="${HOME}/obackup-testdata/testDataRecursive/Excluded" RECURSIVE_EXCLUDE_LIST="${HOME}/obackup-testdata/testDataRecursive/Excluded"
## Rsync exclude / include order (the option set here will be set first, eg: include will make include then exclude patterns) ## Rsync exclude / include order (the option set here will be set first, eg: include will make include then exclude patterns)
@ -134,6 +141,8 @@ RSYNC_EXCLUDE_FROM=""
## List separator char. You may set an alternative separator char for your directories lists above. ## List separator char. You may set an alternative separator char for your directories lists above.
PATH_SEPARATOR_CHAR=";" PATH_SEPARATOR_CHAR=";"
RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions ## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes PRESERVE_PERMISSIONS=yes
@ -177,10 +186,12 @@ BANDWIDTH=0
## Paranoia option. Don't change this unless you read the documentation. ## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync RSYNC_EXECUTABLE=rsync
###### ALERT OPTIONS [ALERT_OPTIONS]
## Alert email addresses separated by a space character ## Alert email addresses separated by a space character
DESTINATION_MAILS="" DESTINATION_MAILS=""
MAIL_BODY_CHARSET=""
## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/ ## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/
SENDER_MAIL="alert@your.system.tld" SENDER_MAIL="alert@your.system.tld"
@ -191,7 +202,7 @@ SMTP_ENCRYPTION=none
SMTP_USER= SMTP_USER=
SMTP_PASSWORD= SMTP_PASSWORD=
###### GENERAL BACKUP OPTIONS [BACKUP SETTINGS]
## Max execution time of whole backup process. Soft max exec time generates a warning only. ## Max execution time of whole backup process. Soft max exec time generates a warning only.
## Hard max exec time generates a warning and stops the whole backup execution. ## Hard max exec time generates a warning and stops the whole backup execution.
@ -207,7 +218,7 @@ ROTATE_SQL_COPIES=7
ROTATE_FILE_BACKUPS=yes ROTATE_FILE_BACKUPS=yes
ROTATE_FILE_COPIES=7 ROTATE_FILE_COPIES=7
###### EXECUTION HOOKS [EXECUTION_HOOKS]
## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set). ## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set).
## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data. ## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data.

View File

@ -2,9 +2,9 @@
###### obackup - Local or Remote, push or pull backup script for files & mysql databases ###### obackup - Local or Remote, push or pull backup script for files & mysql databases
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr) ###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr)
###### obackup v2.1x config file rev 2016081701
###### GENERAL BACKUP OPTIONS [GENERAL]
CONFIG_FILE_REVISION=2.1
## Backup identification string. ## Backup identification string.
INSTANCE_ID="push-test" INSTANCE_ID="push-test"
@ -20,20 +20,22 @@ FILE_BACKUP=yes
## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server. ## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server.
BACKUP_TYPE=push BACKUP_TYPE=push
###### BACKUP STORAGE [BACKUP STORAGE]
## Storage paths of the backups (absolute paths of the local or remote system) ## Storage paths of the backups (absolute paths of the local or remote system)
SQL_STORAGE="${HOME}/obackup-storage/sql-push" SQL_STORAGE="${HOME}/obackup-storage/sql-push"
FILE_STORAGE="${HOME}/obackup-storage/files-push" FILE_STORAGE="${HOME}/obackup-storage/files-push"
## Encryption ## Encryption
ENCRYPTION=yes ENCRYPTION=no
## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system) ## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system)
CRYPT_STORAGE="${HOME}/obackup-storage/crypt-push" CRYPT_STORAGE="${HOME}/obackup-storage/crypt-push"
## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys ## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys
GPG_RECIPIENT="John Doe" GPG_RECIPIENT="John Doe"
PARALLEL_ENCRYPTION_PROCESSES=""
## Create backup directories if they do not exist ## Create backup directories if they do not exist
CREATE_DIRS=yes CREATE_DIRS=yes
@ -53,13 +55,18 @@ GET_BACKUP_SIZE=yes
SQL_WARN_MIN_SPACE=1048576 SQL_WARN_MIN_SPACE=1048576
FILE_WARN_MIN_SPACE=1048576 FILE_WARN_MIN_SPACE=1048576
###### REMOTE ONLY OPTIONS [REMOTE_OPTIONS]
## In case of pulled or pushed backups, remote system URI needs to be supplied. ## In case of pulled or pushed backups, remote system URI needs to be supplied.
REMOTE_SYSTEM_URI="ssh://root@localhost:49999/" REMOTE_SYSTEM_URI="ssh://root@localhost:22/"
## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information. ## You can specify a RSA key (please use full path). If not defined, the default ~/.ssh/id_rsa will be used. See documentation for further information.
SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local" SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa_local_obackup_tests"
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
SSH_PASSWORD_FILE=""
_REMOTE_TOKEN=SomeAlphaNumericToken9
## ssh compression should be used unless your remote connection is good enough (LAN) ## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes SSH_COMPRESSION=yes
@ -79,7 +86,7 @@ REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled. ## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled.
SUDO_EXEC=no SUDO_EXEC=no
###### DATABASE SPECIFIC OPTIONS [DATABASE BACKUP SETTINGS]
## Database backup user ## Database backup user
SQL_USER=root SQL_USER=root
@ -87,9 +94,9 @@ SQL_USER=root
## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list. ## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list.
## Every found database will be backed up as separate backup task. ## Every found database will be backed up as separate backup task.
DATABASES_ALL=yes DATABASES_ALL=yes
DATABASES_ALL_EXCLUDE_LIST="test;information_schema;zarafa_prod" DATABASES_ALL_EXCLUDE_LIST="test;information_schema;kopano_prod"
## Alternatively, if DATABASES_ALL=no, you can specify a list of databases to backup separated by spaces. ## Alternatively, if DATABASES_ALL=no, you can specify a list of databases to backup separated by spaces.
DATABASES_LIST="mysql" DATABASES_LIST=mysql
## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task. ## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task.
## If a task gets stopped, next one in the task list gets executed. Time is specified in seconds. ## If a task gets stopped, next one in the task list gets executed. Time is specified in seconds.
@ -104,7 +111,7 @@ MYSQLDUMP_OPTIONS="--opt --single-transaction"
## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable. ## Generally, level 5 is a good compromise between cpu, memory hunger and compress ratio. Gzipped files are set to be rsyncable.
COMPRESSION_LEVEL=3 COMPRESSION_LEVEL=3
###### FILES SPECIFIC OPTIONS [FILE BACKUP SETTINGS]
## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task. ## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task.
## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task. ## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task.
@ -112,8 +119,8 @@ COMPRESSION_LEVEL=3
## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST. ## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST.
## Directories backup list. List of semicolon separated directories that will be backed up. ## Directories backup list. List of semicolon separated directories that will be backed up.
DIRECTORY_LIST="${HOME}/obackup-testdata/testData" DIRECTORY_LIST=/root/obackup-testdata/testData
RECURSIVE_DIRECTORY_LIST="${HOME}/obackup-testdata/testDataRecursive" RECURSIVE_DIRECTORY_LIST=/root/obackup-testdata/testDataRecursive
RECURSIVE_EXCLUDE_LIST="${HOME}/obackup-testdata/testDataRecursive/Excluded" RECURSIVE_EXCLUDE_LIST="${HOME}/obackup-testdata/testDataRecursive/Excluded"
## Rsync exclude / include order (the option set here will be set first, eg: include will make include then exclude patterns) ## Rsync exclude / include order (the option set here will be set first, eg: include will make include then exclude patterns)
@ -134,6 +141,8 @@ RSYNC_EXCLUDE_FROM=""
## List separator char. You may set an alternative separator char for your directories lists above. ## List separator char. You may set an alternative separator char for your directories lists above.
PATH_SEPARATOR_CHAR=";" PATH_SEPARATOR_CHAR=";"
RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions ## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes PRESERVE_PERMISSIONS=yes
@ -177,10 +186,12 @@ BANDWIDTH=0
## Paranoia option. Don't change this unless you read the documentation. ## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync RSYNC_EXECUTABLE=rsync
###### ALERT OPTIONS [ALERT_OPTIONS]
## Alert email addresses separated by a space character ## Alert email addresses separated by a space character
DESTINATION_MAILS="" DESTINATION_MAILS=""
MAIL_BODY_CHARSET=""
## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/ ## Windows specific (msys / cygwin environment) only mail options (used with mailsend.exe from muquit, http://github.com/muquit/mailsend or from sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail/
SENDER_MAIL="alert@your.system.tld" SENDER_MAIL="alert@your.system.tld"
@ -191,7 +202,7 @@ SMTP_ENCRYPTION=none
SMTP_USER= SMTP_USER=
SMTP_PASSWORD= SMTP_PASSWORD=
###### GENERAL BACKUP OPTIONS [BACKUP SETTINGS]
## Max execution time of whole backup process. Soft max exec time generates a warning only. ## Max execution time of whole backup process. Soft max exec time generates a warning only.
## Hard max exec time generates a warning and stops the whole backup execution. ## Hard max exec time generates a warning and stops the whole backup execution.
@ -207,7 +218,7 @@ ROTATE_SQL_COPIES=7
ROTATE_FILE_BACKUPS=yes ROTATE_FILE_BACKUPS=yes
ROTATE_FILE_COPIES=7 ROTATE_FILE_COPIES=7
###### EXECUTION HOOKS [EXECUTION_HOOKS]
## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set). ## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set).
## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data. ## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data.

View File

@ -1,8 +1,22 @@
#!/usr/bin/env bash #!/usr/bin/env bash
#TODO Encrypted Pull runs on F25 fail for decryption ## obackup basic tests suite 2020050401
## obackup basic tests suite 2017010201 # Supported environment variables
# TRAVIS_RUN=[true|false]
# SSH_PORT=22
# SKIP_REMOTE=[true|false]
#TODO Encrypted Pull runs on F25 fail for decryption
# If gpg key generation hangs, please install and configure rngd service
#eg yum install rng-tools ; systemctl start rngd
# or yum install epel-release && yum innstall haveged && systemctl start haveged
if [ "$SKIP_REMOTE" == "" ]; then
SKIP_REMOTE=false
fi
OBACKUP_DIR="$(pwd)" OBACKUP_DIR="$(pwd)"
OBACKUP_DIR=${OBACKUP_DIR%%/dev*} OBACKUP_DIR=${OBACKUP_DIR%%/dev*}
@ -62,6 +76,7 @@ DATABASE_EXCLUDED="information_schema.sql.xz"
CRYPT_EXTENSION=".obackup.gpg" CRYPT_EXTENSION=".obackup.gpg"
ROTATE_1_EXTENSION=".obackup.1" ROTATE_1_EXTENSION=".obackup.1"
ROTATE_2_EXTENSION=".obackup.2"
PASSFILE="passfile" PASSFILE="passfile"
CRYPT_TESTFILE="testfile" CRYPT_TESTFILE="testfile"
@ -71,42 +86,10 @@ OBACKUP_VERSION=2.x
OBACKUP_MIN_VERSION=x OBACKUP_MIN_VERSION=x
OBACKUP_IS_STABLE=maybe OBACKUP_IS_STABLE=maybe
# Setup an array with all function modes
#declare -Ag osyncParameters
function GetConfFileValue () {
local file="${1}"
local name="${2}"
local value
value=$(grep "^$name=" "$file")
if [ $? == 0 ]; then
value="${value##*=}"
echo "$value"
else
assertEquals "$name does not exist in [$file]." "1" "0"
fi
}
function SetConfFileValue () {
local file="${1}"
local name="${2}"
local value="${3}"
if grep "^$name=" "$file" > /dev/null; then
# Using -i.tmp for BSD compat
sed -i.tmp "s#^$name=.*#$name=$value#" "$file"
rm -f "$file.tmp"
assertEquals "Set $name to [$value]." "0" $?
else
assertEquals "$name does not exist in [$file]." "1" "0"
fi
}
function SetupSSH { function SetupSSH {
echo -e 'y\n'| ssh-keygen -t rsa -b 2048 -N "" -f "${HOME}/.ssh/id_rsa_local" echo -e 'y\n'| ssh-keygen -t rsa -b 2048 -N "" -f "${HOME}/.ssh/id_rsa_local_obackup_tests"
if ! grep "$(cat ${HOME}/.ssh/id_rsa_local.pub)" "${HOME}/.ssh/authorized_keys"; then if ! grep "$(cat ${HOME}/.ssh/id_rsa_local_obackup_tests.pub)" "${HOME}/.ssh/authorized_keys"; then
cat "${HOME}/.ssh/id_rsa_local.pub" >> "${HOME}/.ssh/authorized_keys" echo "from=\"*\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty,command=\"/usr/local/bin/ssh_filter.sh SomeAlphaNumericToken9\" $(cat ${HOME}/.ssh/id_rsa_local_obackup_tests.pub)" >> "${HOME}/.ssh/authorized_keys"
fi fi
chmod 600 "${HOME}/.ssh/authorized_keys" chmod 600 "${HOME}/.ssh/authorized_keys"
@ -121,13 +104,10 @@ function SetupSSH {
} }
function RemoveSSH { function RemoveSSH {
local pubkey if [ -f "${HOME}/.ssh/id_rsa_local_obackup_tests" ]; then
echo "Restoring SSH authorized_keys file"
if [ -f "${HOME}/.ssh/id_rsa_local" ]; then sed -i.bak "s|.*$(cat "${HOME}/.ssh/id_rsa_local_obackup_tests.pub")||g" "${HOME}/.ssh/authorized_keys"
rm -f "${HOME}/.ssh/{id_rsa_local_obackup_tests.pub,id_rsa_local_obackup_tests}"
pubkey=$(cat "${HOME}/.ssh/id_rsa_local.pub")
sed -i.bak "#$pubkey#d" "${HOME}/.ssh/authorized_keys"
rm -f "${HOME}/.ssh/{id_rsa_local.pub,id_rsa_local}"
fi fi
} }
@ -138,6 +118,8 @@ function SetupGPG {
CRYPT_TOOL=gpg CRYPT_TOOL=gpg
else else
echo "No gpg support" echo "No gpg support"
assertEquals "Failed to detect gpg" "1" $?
return
fi fi
echo "Crypt tool=$CRYPT_TOOL" echo "Crypt tool=$CRYPT_TOOL"
@ -157,17 +139,18 @@ Passphrase: PassPhrase123
%commit %commit
%echo done %echo done
EOF EOF
if [ "$TRAVIS_RUN" == true ]; then
if type apt-get > /dev/null 2>&1; then if type apt-get > /dev/null 2>&1; then
sudo apt-get install rng-tools sudo apt-get install rng-tools
fi fi
# Setup fast entropy # Setup fast entropy
if type rngd > /dev/null 2>&1; then if type rngd > /dev/null 2>&1; then
rngd -r /dev/urandom $SUDO_CMD rngd -r /dev/urandom
else else
echo "No rngd support" echo "No rngd support"
fi fi
fi
$CRYPT_TOOL --batch --gen-key gpgcommand $CRYPT_TOOL --batch --gen-key gpgcommand
echo "Currently owned $CRYPT_TOOL keys" echo "Currently owned $CRYPT_TOOL keys"
@ -183,6 +166,10 @@ function oneTimeSetUp () {
START_TIME=$SECONDS START_TIME=$SECONDS
source "$DEV_DIR/ofunctions.sh" source "$DEV_DIR/ofunctions.sh"
# Set default umask because ofunctions set 0077
umask 0022
GetLocalOS GetLocalOS
echo "Detected OS: $LOCAL_OS" echo "Detected OS: $LOCAL_OS"
@ -191,27 +178,27 @@ function oneTimeSetUp () {
if [ "$TRAVIS_RUN" == true ]; then if [ "$TRAVIS_RUN" == true ]; then
echo "Running with travis settings" echo "Running with travis settings"
REMOTE_USER="travis" REMOTE_USER="travis"
RHOST_PING="no" RHOST_PING=false
SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_3RD_PARTY_HOSTS" "" SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_3RD_PARTY_HOSTS" ""
SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_3RD_PARTY_HOSTS" "" SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_3RD_PARTY_HOSTS" ""
# Config value didn't have S at the end in old files # Config value didn't have S at the end in old files
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOST" "" SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOST" ""
SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_HOST_PING" "no" SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_HOST_PING" false
SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_HOST_PING" "no" SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_HOST_PING" false
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" "no" SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" false
else else
echo "Running with local settings" echo "Running with local settings"
REMOTE_USER="root" REMOTE_USER="root"
RHOST_PING="yes" RHOST_PING=true
SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\"" SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\""
SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\"" SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_3RD_PARTY_HOSTS" "\"www.kernel.org www.google.com\""
# Config value didn't have S at the end in old files # Config value didn't have S at the end in old files
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOST" "\"www.kernel.org www.google.com\"" SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_3RD_PARTY_HOST" "\"www.kernel.org www.google.com\""
SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_HOST_PING" "yes" SetConfFileValue "$CONF_DIR/$PULL_CONF" "REMOTE_HOST_PING" true
SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_HOST_PING" "yes" SetConfFileValue "$CONF_DIR/$PUSH_CONF" "REMOTE_HOST_PING" true
SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" "yes" SetConfFileValue "$CONF_DIR/$OLD_CONF" "REMOTE_HOST_PING" true
fi fi
# Get default ssh port from env # Get default ssh port from env
@ -219,8 +206,19 @@ function oneTimeSetUp () {
SSH_PORT=22 SSH_PORT=22
fi fi
#TODO: Assuming that macos has the same syntax than bsd here
if [ "$LOCAL_OS" == "msys" ] || [ "$LOCAL_OS" == "Cygwin" ]; then
SUDO_CMD=""
elif [ "$LOCAL_OS" == "BSD" ] || [ "$LOCAL_OS" == "MacOSX" ]; then
SUDO_CMD=""
else
SUDO_CMD="sudo"
fi
SetupGPG SetupGPG
if [ "$SKIP_REMOTE" != "yes" ]; then if [ "$SKIP_REMOTE" != true ]; then
SetupSSH SetupSSH
fi fi
@ -235,26 +233,32 @@ function oneTimeSetUp () {
# Set basic values that could get changed later # Set basic values that could get changed later
for i in "$LOCAL_CONF" "$PULL_CONF" "$PUSH_CONF"; do for i in "$LOCAL_CONF" "$PULL_CONF" "$PUSH_CONF"; do
SetConfFileValue "$CONF_DIR/$i" "ENCRYPTION" "no" SetConfFileValue "$CONF_DIR/$i" "ENCRYPTION" false
SetConfFileValue "$CONF_DIR/$i" "DATABASES_ALL" "yes" SetConfFileValue "$CONF_DIR/$i" "DATABASES_ALL" true
SetConfFileValue "$CONF_DIR/$i" "DATABASES_LIST" "mysql" SetConfFileValue "$CONF_DIR/$i" "DATABASES_LIST" "mysql"
SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" "yes" SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" true
SetConfFileValue "$CONF_DIR/$i" "DIRECTORY_LIST" "${HOME}/obackup-testdata/testData" SetConfFileValue "$CONF_DIR/$i" "DIRECTORY_LIST" "${HOME}/obackup-testdata/testData"
SetConfFileValue "$CONF_DIR/$i" "RECURSIVE_DIRECTORY_LIST" "${HOME}/obackup-testdata/testDataRecursive" SetConfFileValue "$CONF_DIR/$i" "RECURSIVE_DIRECTORY_LIST" "${HOME}/obackup-testdata/testDataRecursive"
SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" "yes" SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" true
done done
} }
function oneTimeTearDown () { function oneTimeTearDown () {
SetConfFileValue "$OBACKUP_DIR/$OBACKUP_EXECUTABLE" "IS_STABLE" "$OBACKUP_IS_STABLE" SetConfFileValue "$OBACKUP_DIR/$OBACKUP_EXECUTABLE" "IS_STABLE" "$OBACKUP_IS_STABLE"
if [ "$SKIP_REMOTE" != true ]; then
RemoveSSH RemoveSSH
fi
#TODO: uncomment this when dev is done #TODO: uncomment this when dev is done
#rm -rf "$SOURCE_DIR" #rm -rf "$SOURCE_DIR"
#rm -rf "$TARGET_DIR" #rm -rf "$TARGET_DIR"
rm -f "$TMP_FILE" rm -f "$TMP_FILE"
cd "$OBACKUP_DIR"
$SUDO_CMD ./install.sh --remove --silent --no-stats
assertEquals "Uninstall failed" "0" $?
ELAPSED_TIME=$(($SECONDS - $START_TIME)) ELAPSED_TIME=$(($SECONDS - $START_TIME))
echo "It took $ELAPSED_TIME seconds to run these tests." echo "It took $ELAPSED_TIME seconds to run these tests."
} }
@ -275,7 +279,8 @@ function setUp () {
touch "$SOURCE_DIR/$RECURSIVE_DIR/$N_FILE_1" touch "$SOURCE_DIR/$RECURSIVE_DIR/$N_FILE_1"
touch "$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_1/$R_FILE_1" touch "$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_1/$R_FILE_1"
touch "$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_2/$R_FILE_2" touch "$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_2/$R_FILE_2"
dd if=/dev/urandom of="$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_3/$R_FILE_3" bs=1M count=2 dd if=/dev/urandom of="$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_3/$R_FILE_3" bs=1048576 count=2
assertEquals "dd file creation" "0" $?
touch "$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_3/$EXCLUDED_FILE" touch "$SOURCE_DIR/$RECURSIVE_DIR/$R_DIR_3/$EXCLUDED_FILE"
FilePresence=( FilePresence=(
@ -307,11 +312,15 @@ function setUp () {
function test_Merge () { function test_Merge () {
cd "$DEV_DIR" cd "$DEV_DIR"
./merge.sh ./merge.sh obackup
assertEquals "Merging code" "0" $? assertEquals "Merging code" "0" $?
cd "$OBACKUP_DIR"
$SUDO_CMD ./install.sh --silent --no-stats
assertEquals "Install failed" "0" $?
# Set obackup version to stable while testing to avoid warning message # Set obackup version to stable while testing to avoid warning message
SetConfFileValue "$OBACKUP_DIR/$OBACKUP_EXECUTABLE" "IS_STABLE" "yes" SetConfFileValue "$OBACKUP_DIR/$OBACKUP_EXECUTABLE" "IS_STABLE" true
} }
# Keep this function to check GPG behavior depending on OS. (GPG 2.1 / GPG 2.0x / GPG 1.4 don't behave the same way) # Keep this function to check GPG behavior depending on OS. (GPG 2.1 / GPG 2.0x / GPG 1.4 don't behave the same way)
@ -322,13 +331,33 @@ function test_GPG () {
# Detect if GnuPG >= 2.1 that does not allow automatic pin entry anymore # Detect if GnuPG >= 2.1 that does not allow automatic pin entry anymore
# GnuPG 2.1.11 has a bug that does not allow usage of pinentry mode 'loopback'
# GnuPC 2.1.12 has that bug resolved
cryptToolVersion=$($CRYPT_TOOL --version | head -1 | awk '{print $3}') cryptToolVersion=$($CRYPT_TOOL --version | head -1 | awk '{print $3}')
cryptToolMajorVersion=${cryptToolVersion%%.*} cryptToolMajorVersion=${cryptToolVersion%%.*}
cryptToolSubVersion=${cryptToolVersion#*.} cryptToolSubVersion=${cryptToolVersion#*.}
cryptToolSubVersion=${cryptToolSubVersion%.*} cryptToolSubVersion=${cryptToolSubVersion%.*}
cryptToolMinorVersion=${cryptToolVersion##*.}
echo "$CRYPT_TOOL is $cryptToolVersion"
if [ $cryptToolMajorVersion -eq 2 ] && [ $cryptToolSubVersion -ge 1 ]; then if [ $cryptToolMajorVersion -eq 2 ] && [ $cryptToolSubVersion -ge 1 ]; then
if [ $cryptToolMinorVersion -gt 11 ]; then
echo "Using --pinentry-mode loopback [$cryptToolMajorVersion.$cryptToolSubVersion.$cryptToolMinorVersion]"
additionalParameters="--pinentry-mode loopback" additionalParameters="--pinentry-mode loopback"
elif [ $cryptToolMinorVersion -eq 11 ]; then
echo "Using fix to allow --pinentry-mode loopback"
[ -f "${HOME}/.gnupg/gpg-agent.conf" ] || touch "${HOME}/.gnupg/gpg-agent.conf"
echo "allow-loopback-pinentry" >> "${HOME}/.gnupg/gpg-agent.conf"
gpgconf --reload gpg-agent
cat "${HOME}/.gnupg/gpg-agent.conf"
additionalParameters="--pinentry-mode loopback"
else
echo "Not using --pinentry-mode loopback [$cryptToolMajorVersion.$cryptToolSubVersion.$cryptToolMinorVersion]"
fi
fi fi
if [ "$CRYPT_TOOL" == "gpg2" ]; then if [ "$CRYPT_TOOL" == "gpg2" ]; then
@ -339,6 +368,7 @@ function test_GPG () {
echo "Decrypt using passphrase file" echo "Decrypt using passphrase file"
echo $CRYPT_TOOL $options --out "$TESTS_DIR/$CRYPT_TESTFILE" --batch --yes $additionalParameters --passphrase-file="$TESTS_DIR/$PASSFILE" --decrypt "$TESTS_DIR/$CRYPT_TESTFILE$CRYPT_EXTENSION"
$CRYPT_TOOL $options --out "$TESTS_DIR/$CRYPT_TESTFILE" --batch --yes $additionalParameters --passphrase-file="$TESTS_DIR/$PASSFILE" --decrypt "$TESTS_DIR/$CRYPT_TESTFILE$CRYPT_EXTENSION" $CRYPT_TOOL $options --out "$TESTS_DIR/$CRYPT_TESTFILE" --batch --yes $additionalParameters --passphrase-file="$TESTS_DIR/$PASSFILE" --decrypt "$TESTS_DIR/$CRYPT_TESTFILE$CRYPT_EXTENSION"
assertEquals "Decrypt file using passfile" "0" $? assertEquals "Decrypt file using passfile" "0" $?
@ -352,7 +382,7 @@ function test_GPG () {
} }
function test_LocalRun () { function test_LocalRun () {
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "ENCRYPTION" "no" SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "ENCRYPTION" false
# Basic return code tests. Need to go deep into file presence testing # Basic return code tests. Need to go deep into file presence testing
cd "$OBACKUP_DIR" cd "$OBACKUP_DIR"
@ -385,7 +415,7 @@ function test_LocalRun () {
done done
diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-local/$SOURCE_DIR" | grep -i Exclu diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-local/$SOURCE_DIR" | grep -i Exclu
[ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-local/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 2 ] [ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-local/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 3 ]
assertEquals "Diff should only output excluded files" "0" $? assertEquals "Diff should only output excluded files" "0" $?
# Tests presence of rotated files # Tests presence of rotated files
@ -395,20 +425,37 @@ function test_LocalRun () {
for file in "${DatabasePresence[@]}"; do for file in "${DatabasePresence[@]}"; do
[ -f "$TARGET_DIR_SQL_LOCAL/$file$ROTATE_1_EXTENSION" ] [ -f "$TARGET_DIR_SQL_LOCAL/$file$ROTATE_1_EXTENSION" ]
assertEquals "Database rotated Presence [$TARGET_DIR_SQL_LOCAL/$file]" "0" $? assertEquals "Database rotated Presence [$TARGET_DIR_SQL_LOCAL/$file$ROTATE_1_EXTENSION]" "0" $?
done done
[ -d "$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ] [ -d "$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $? assertEquals "File rotated Presence [$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $?
# Second test of rotated files
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$LOCAL_CONF"
assertEquals "Return code" "0" $?
for file in "${DatabasePresence[@]}"; do
[ -f "$TARGET_DIR_SQL_LOCAL/$file$ROTATE_1_EXTENSION" ]
assertEquals "Database rotated Presence [$TARGET_DIR_SQL_LOCAL/$file$ROTATE_1_EXTENSION]" "0" $?
[ -f "$TARGET_DIR_SQL_LOCAL/$file$ROTATE_2_EXTENSION" ]
assertEquals "Database rotated Presence [$TARGET_DIR_SQL_LOCAL/$file$ROTATE_2_EXTENSION]" "0" $?
done
[ -d "$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $?
[ -d "$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_2_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_LOCAL/$(dirname $SOURCE_DIR)$ROTATE_2_EXTENSION]" "0" $?
} }
function test_PullRun () { function test_PullRun () {
if [ "$SKIP_REMOTE" == "yes" ]; then if [ "$SKIP_REMOTE" == true ]; then
return 0 return 0
fi fi
SetConfFileValue "$CONF_DIR/$PULL_CONF" "ENCRYPTION" "no" SetConfFileValue "$CONF_DIR/$PULL_CONF" "ENCRYPTION" false
# Basic return code tests. Need to go deep into file presence testing # Basic return code tests. Need to go deep into file presence testing
cd "$OBACKUP_DIR" cd "$OBACKUP_DIR"
@ -441,12 +488,11 @@ function test_PullRun () {
done done
diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu
[ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 2 ] [ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 3 ]
assertEquals "Diff should only output excluded files" "0" $? assertEquals "Diff should only output excluded files" "0" $?
# Tests presence of rotated files # Tests presence of rotated files
cd "$OBACKUP_DIR"
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$PULL_CONF" REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$PULL_CONF"
assertEquals "Return code" "0" $? assertEquals "Return code" "0" $?
@ -458,14 +504,31 @@ function test_PullRun () {
[ -d "$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ] [ -d "$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $? assertEquals "File rotated Presence [$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $?
# Second test of presence of rotated files
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$PULL_CONF"
assertEquals "Return code" "0" $?
for file in "${DatabasePresence[@]}"; do
[ -f "$TARGET_DIR_SQL_PULL/$file$ROTATE_1_EXTENSION" ]
assertEquals "Database rotated Presence [$TARGET_DIR_SQL_PULL/$file$ROTATE_1_EXTENSION]" "0" $?
[ -f "$TARGET_DIR_SQL_PULL/$file$ROTATE_2_EXTENSION" ]
assertEquals "Database rotated Presence [$TARGET_DIR_SQL_PULL/$file$ROTATE_2_EXTENSION]" "0" $?
done
[ -d "$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $?
[ -d "$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_2_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_PULL/$(dirname $SOURCE_DIR)$ROTATE_2_EXTENSION]" "0" $?
} }
function test_PushRun () { function test_PushRun () {
if [ "$SKIP_REMOTE" == "yes" ]; then if [ "$SKIP_REMOTE" == true ]; then
return 0 return 0
fi fi
SetConfFileValue "$CONF_DIR/$PUSH_CONF" "ENCRYPTION" "no" SetConfFileValue "$CONF_DIR/$PUSH_CONF" "ENCRYPTION" false
# Basic return code tests. Need to go deep into file presence testing # Basic return code tests. Need to go deep into file presence testing
cd "$OBACKUP_DIR" cd "$OBACKUP_DIR"
@ -498,11 +561,11 @@ function test_PushRun () {
done done
diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-push/$SOURCE_DIR" | grep -i Exclu diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-push/$SOURCE_DIR" | grep -i Exclu
[ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-push/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 2 ] [ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-push/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 3 ]
assertEquals "Diff should only output excluded files" "0" $? assertEquals "Diff should only output excluded files" "0" $?
# Tests presence of rotated files # Tests presence of rotated files
cd "$OBACKUP_DIR"
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$PUSH_CONF" REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$PUSH_CONF"
assertEquals "Return code" "0" $? assertEquals "Return code" "0" $?
@ -514,10 +577,26 @@ function test_PushRun () {
[ -d "$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ] [ -d "$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $? assertEquals "File rotated Presence [$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $?
# Second test of presence of rotated files
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$PUSH_CONF"
assertEquals "Return code" "0" $?
for file in "${DatabasePresence[@]}"; do
[ -f "$TARGET_DIR_SQL_PUSH/$file$ROTATE_1_EXTENSION" ]
assertEquals "Database rotated Presence [$TARGET_DIR_SQL_PUSH/$file$ROTATE_1_EXTENSION]" "0" $?
[ -f "$TARGET_DIR_SQL_PUSH/$file$ROTATE_2_EXTENSION" ]
assertEquals "Database rotated Presence [$TARGET_DIR_SQL_PUSH/$file$ROTATE_2_EXTENSION]" "0" $?
done
[ -d "$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_1_EXTENSION]" "0" $?
[ -d "$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_2_EXTENSION" ]
assertEquals "File rotated Presence [$TARGET_DIR_FILE_PUSH/$(dirname $SOURCE_DIR)$ROTATE_2_EXTENSION]" "0" $?
} }
function test_EncryptLocalRun () { function test_EncryptLocalRun () {
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "ENCRYPTION" "yes" SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "ENCRYPTION" true
cd "$OBACKUP_DIR" cd "$OBACKUP_DIR"
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$LOCAL_CONF" REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$LOCAL_CONF"
@ -572,16 +651,16 @@ function test_EncryptLocalRun () {
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE --decrypt="$TARGET_DIR_CRYPT_LOCAL" --passphrase-file="$TESTS_DIR/$PASSFILE" REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE --decrypt="$TARGET_DIR_CRYPT_LOCAL" --passphrase-file="$TESTS_DIR/$PASSFILE"
assertEquals "Decrypt file storage in [$TARGET_DIR_CRYPT_LOCAL]" "0" $? assertEquals "Decrypt file storage in [$TARGET_DIR_CRYPT_LOCAL]" "0" $?
SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "ENCRYPTION" "no" SetConfFileValue "$CONF_DIR/$LOCAL_CONF" "ENCRYPTION" false
} }
function test_EncryptPullRun () { function test_EncryptPullRun () {
if [ "$SKIP_REMOTE" == "yes" ]; then if [ "$SKIP_REMOTE" == true ]; then
return 0 return 0
fi fi
# Basic return code tests. Need to go deep into file presence testing # Basic return code tests. Need to go deep into file presence testing
SetConfFileValue "$CONF_DIR/$PULL_CONF" "ENCRYPTION" "yes" SetConfFileValue "$CONF_DIR/$PULL_CONF" "ENCRYPTION" true
cd "$OBACKUP_DIR" cd "$OBACKUP_DIR"
@ -615,7 +694,7 @@ function test_EncryptPullRun () {
# Only excluded files should be listed here # Only excluded files should be listed here
diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu
[ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 2 ] [ $(diff -qr "$SOURCE_DIR" "$TARGET_DIR/files-pull/$SOURCE_DIR" | grep -i Exclu | wc -l) -eq 3 ]
assertEquals "Diff should only output excluded files" "0" $? assertEquals "Diff should only output excluded files" "0" $?
# Tests presence of rotated files # Tests presence of rotated files
@ -638,16 +717,16 @@ function test_EncryptPullRun () {
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE --decrypt="$TARGET_DIR_CRYPT_PULL" --passphrase-file="$TESTS_DIR/$PASSFILE" REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE --decrypt="$TARGET_DIR_CRYPT_PULL" --passphrase-file="$TESTS_DIR/$PASSFILE"
assertEquals "Decrypt file storage in [$TARGET_DIR_CRYPT_PULL]" "0" $? assertEquals "Decrypt file storage in [$TARGET_DIR_CRYPT_PULL]" "0" $?
SetConfFileValue "$CONF_DIR/$PULL_CONF" "ENCRYPTION" "no" SetConfFileValue "$CONF_DIR/$PULL_CONF" "ENCRYPTION" false
} }
function test_EncryptPushRun () { function test_EncryptPushRun () {
if [ "$SKIP_REMOTE" == "yes" ]; then if [ "$SKIP_REMOTE" == true ]; then
return 0 return 0
fi fi
# Basic return code tests. Need to go deep into file presence testing # Basic return code tests. Need to go deep into file presence testing
SetConfFileValue "$CONF_DIR/$PUSH_CONF" "ENCRYPTION" "yes" SetConfFileValue "$CONF_DIR/$PUSH_CONF" "ENCRYPTION" true
cd "$OBACKUP_DIR" cd "$OBACKUP_DIR"
@ -703,7 +782,7 @@ function test_EncryptPushRun () {
REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE --decrypt="$TARGET_DIR_FILE_PUSH" --passphrase-file="$TESTS_DIR/$PASSFILE" REMOTE_HOST_PING=$RHOST_PING ./$OBACKUP_EXECUTABLE --decrypt="$TARGET_DIR_FILE_PUSH" --passphrase-file="$TESTS_DIR/$PASSFILE"
assertEquals "Decrypt file storage in [$TARGET_DIR_FILE_PUSH]" "0" $? assertEquals "Decrypt file storage in [$TARGET_DIR_FILE_PUSH]" "0" $?
SetConfFileValue "$CONF_DIR/$PUSH_CONF" "ENCRYPTION" "no" SetConfFileValue "$CONF_DIR/$PUSH_CONF" "ENCRYPTION" false
} }
function test_missing_databases () { function test_missing_databases () {
@ -711,25 +790,25 @@ function test_missing_databases () {
# Prepare files for missing databases # Prepare files for missing databases
for i in "$LOCAL_CONF" "$PUSH_CONF" "$PULL_CONF"; do for i in "$LOCAL_CONF" "$PUSH_CONF" "$PULL_CONF"; do
SetConfFileValue "$CONF_DIR/$i" "DATABASES_ALL" "no" SetConfFileValue "$CONF_DIR/$i" "DATABASES_ALL" false
SetConfFileValue "$CONF_DIR/$i" "DATABASES_LIST" "\"zorglub;mysql\"" SetConfFileValue "$CONF_DIR/$i" "DATABASES_LIST" "\"zorglub;mysql\""
SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" "yes" SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" true
SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" "no" SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" false
REMOTE_HOST=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$i" REMOTE_HOST=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$i"
assertEquals "Missing databases should trigger error with [$i]" "1" $? assertEquals "Missing databases should trigger error with [$i]" "1" $?
SetConfFileValue "$CONF_DIR/$i" "DATABASES_ALL" "yes" SetConfFileValue "$CONF_DIR/$i" "DATABASES_ALL" true
SetConfFileValue "$CONF_DIR/$i" "DATABASES_LIST" "mysql" SetConfFileValue "$CONF_DIR/$i" "DATABASES_LIST" "mysql"
SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" "yes" SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" true
done done
for i in "$LOCAL_CONF" "$PUSH_CONF" "$PULL_CONF"; do for i in "$LOCAL_CONF" "$PUSH_CONF" "$PULL_CONF"; do
SetConfFileValue "$CONF_DIR/$i" "DIRECTORY_LIST" "${HOME}/obackup-testdata/nonPresentData" SetConfFileValue "$CONF_DIR/$i" "DIRECTORY_LIST" "${HOME}/obackup-testdata/nonPresentData"
SetConfFileValue "$CONF_DIR/$i" "RECURSIVE_DIRECTORY_LIST" "${HOME}/obackup-testdata/nonPresentDataRecursive" SetConfFileValue "$CONF_DIR/$i" "RECURSIVE_DIRECTORY_LIST" "${HOME}/obackup-testdata/nonPresentDataRecursive"
SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" "no" SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" false
SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" "yes" SetConfFileValue "$CONF_DIR/$i" "FILE_BACKUP" true
REMOTE_HOST=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$i" REMOTE_HOST=$RHOST_PING ./$OBACKUP_EXECUTABLE "$CONF_DIR/$i"
assertEquals "Missing files should trigger error with [$i]" "1" $? assertEquals "Missing files should trigger error with [$i]" "1" $?
@ -738,7 +817,7 @@ function test_missing_databases () {
echo "nope" echo "nope"
SetConfFileValue "$CONF_DIR/$i" "DIRECTORY_LIST" "${HOME}/obackup-testdata/testData" SetConfFileValue "$CONF_DIR/$i" "DIRECTORY_LIST" "${HOME}/obackup-testdata/testData"
SetConfFileValue "$CONF_DIR/$i" "RECURSIVE_DIRECTORY_LIST" "${HOME}/obackup-testdata/testDataRecursive" SetConfFileValue "$CONF_DIR/$i" "RECURSIVE_DIRECTORY_LIST" "${HOME}/obackup-testdata/testDataRecursive"
SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" "yes" SetConfFileValue "$CONF_DIR/$i" "SQL_BACKUP" true
done done
} }

View File

@ -0,0 +1,46 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at kate.ward@forestent.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

201
dev/tests/shunit2/LICENSE Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

434
dev/tests/shunit2/README.md Normal file
View File

@ -0,0 +1,434 @@
# shUnit2
shUnit2 is an [xUnit](http://en.wikipedia.org/wiki/XUnit) unit test framework for Bourne based shell scripts, and it is designed to work in a similar manner to [JUnit](http://www.junit.org), [PyUnit](http://pyunit.sourceforge.net), etc.. If you have ever had the desire to write a unit test for a shell script, shUnit2 can do the job.
[![Travis CI](https://img.shields.io/travis/kward/shunit2.svg)](https://travis-ci.org/kward/shunit2)
## Table of Contents
* [Introduction](#introduction)
* [Credits / Contributors](#credits-contributors)
* [Feedback](#feedback)
* [Quickstart](#quickstart)
* [Function Reference](#function-reference)
* [General Info](#general-info)
* [Asserts](#asserts)
* [Failures](#failures)
* [Setup/Teardown](#setup-teardown)
* [Skipping](#skipping)
* [Suites](#suites)
* [Advanced Usage](#advanced-usage)
* [Some constants you can use](#some-constants-you-can-use)
* [Error Handling](#error-handling)
* [Including Line Numbers in Asserts (Macros)](#including-line-numbers-in-asserts-macros)
* [Test Skipping](#test-skipping)
* [Appendix](#appendix)
* [Getting help](#getting-help)
* [Zsh](#zsh)
---
## <a name="introduction"></a> Introduction
shUnit2 was originally developed to provide a consistent testing solution for [log4sh][log4sh], a shell based logging framework similar to [log4j](http://logging.apache.org). During the development of that product, a repeated problem of having things work just fine under one shell (`/bin/bash` on Linux to be specific), and then not working under another shell (`/bin/sh` on Solaris) kept coming up. Although several simple tests were run, they were not adequate and did not catch some corner cases. The decision was finally made to write a proper unit test framework after multiple brown-bag releases were made. _Research was done to look for an existing product that met the testing requirements, but no adequate product was found._
Tested Operating Systems (varies over time)
* Cygwin
* FreeBSD (user supported)
* Linux (Gentoo, Ubuntu)
* Mac OS X
* Solaris 8, 9, 10 (inc. OpenSolaris)
Tested Shells
* Bourne Shell (__sh__)
* BASH - GNU Bourne Again SHell (__bash__)
* DASH (__dash__)
* Korn Shell (__ksh__)
* pdksh - Public Domain Korn Shell (__pdksh__)
* zsh - Zsh (__zsh__) (since 2.1.2) _please see the Zsh shell errata for more information_
See the appropriate Release Notes for this release (`doc/RELEASE_NOTES-X.X.X.txt`) for the list of actual versions tested.
### <a name="credits-contributors"></a> Credits / Contributors
A list of contributors to shUnit2 can be found in `doc/contributors.md`. Many thanks go out to all those who have contributed to make this a better tool.
shUnit2 is the original product of many hours of work by Kate Ward, the primary author of the code. For related software, check out https://github.com/kward.
### <a name="feedback"></a> Feedback
Feedback is most certainly welcome for this document. Send your additions, comments and criticisms to the shunit2-users@google.com mailing list.
---
## <a name="quickstart"></a> Quickstart
This section will give a very quick start to running unit tests with shUnit2. More information is located in later sections.
Here is a quick sample script to show how easy it is to write a unit test in shell. _Note: the script as it stands expects that you are running it from the "examples" directory._
```sh
#! /bin/sh
# file: examples/equality_test.sh
testEquality() {
assertEquals 1 1
}
# Load shUnit2.
. ./shunit2
```
Running the unit test should give results similar to the following.
```console
$ cd examples
$ ./equality_test.sh
testEquality
Ran 1 test.
OK
```
W00t! You've just run your first successful unit test. So, what just happened? Quite a bit really, and it all happened simply by sourcing the `shunit2` library. The basic functionality for the script above goes like this:
* When shUnit2 is sourced, it will walk through any functions defined whose name starts with the string `test`, and add those to an internal list of tests to execute. Once a list of test functions to be run has been determined, shunit2 will go to work.
* Before any tests are executed, shUnit2 again looks for a function, this time one named `oneTimeSetUp()`. If it exists, it will be run. This function is normally used to setup the environment for all tests to be run. Things like creating directories for output or setting environment variables are good to place here. Just so you know, you can also declare a corresponding function named `oneTimeTearDown()` function that does the same thing, but once all the tests have been completed. It is good for removing temporary directories, etc.
* shUnit2 is now ready to run tests. Before doing so though, it again looks for another function that might be declared, one named `setUp()`. If the function exists, it will be run before each test. It is good for resetting the environment so that each test starts with a clean slate. **At this stage, the first test is finally run.** The success of the test is recorded for a report that will be generated later. After the test is run, shUnit2 looks for a final function that might be declared, one named `tearDown()`. If it exists, it will be run after each test. It is a good place for cleaning up after each test, maybe doing things like removing files that were created, or removing directories. This set of steps, `setUp() > test() > tearDown()`, is repeated for all of the available tests.
* Once all the work is done, shUnit2 will generate the nice report you saw above. A summary of all the successes and failures will be given so that you know how well your code is doing.
We should now try adding a test that fails. Change your unit test to look like this.
```sh
#! /bin/sh
# file: examples/party_test.sh
testEquality() {
assertEquals 1 1
}
testPartyLikeItIs1999() {
year=`date '+%Y'`
assertEquals "It's not 1999 :-(" '1999' "${year}"
}
# Load shUnit2.
. ./shunit2
```
So, what did you get? I guess it told you that this isn't 1999. Bummer, eh? Hopefully, you noticed a couple of things that were different about the second test. First, we added an optional message that the user will see if the assert fails. Second, we did comparisons of strings instead of integers as in the first test. It doesn't matter whether you are testing for equality of strings or integers. Both work equally well with shUnit2.
Hopefully, this is enough to get you started with unit testing. If you want a ton more examples, take a look at the tests provided with [log4sh][log4sh] or [shFlags][shflags]. Both provide excellent examples of more advanced usage. shUnit2 was after all written to meet the unit testing need that [log4sh][log4sh] had.
---
## <a name="function-reference"></a> Function Reference
### <a name="general-info"></a> General Info
Any string values passed should be properly quoted -- they must be surrounded by single-quote (`'`) or double-quote (`"`) characters -- so that the shell will properly parse them.
### <a name="asserts"></a> Asserts
`assertEquals [message] expected actual`
Asserts that _expected_ and _actual_ are equal to one another. The _expected_ and _actual_ values can be either strings or integer values as both will be treated as strings. The _message_ is optional, and must be quoted.
`assertNotEquals [message] unexpected actual`
Asserts that _unexpected_ and _actual_ are not equal to one another. The _unexpected_ and _actual_ values can be either strings or integer values as both will be treated as strings. The _message_ is optional, and must be quoted.
`assertSame [message] expected actual`
This function is functionally equivalent to `assertEquals`.
`assertNotSame [message] unexpected actual`
This function is functionally equivalent to `assertNotEquals`.
`assertNull [message] value`
Asserts that _value_ is _null_, or in shell terms, a zero-length string. The _value_ must be a string as an integer value does not translate into a zero-length string. The _message_ is optional, and must be quoted.
`assertNotNull [message] value`
Asserts that _value_ is _not null_, or in shell terms, a non-empty string. The _value_ may be a string or an integer as the later will be parsed as a non-empty string value. The _message_ is optional, and must be quoted.
`assertTrue [message] condition`
Asserts that a given shell test _condition_ is _true_. The condition can be as simple as a shell _true_ value (the value `0` -- equivalent to `${SHUNIT_TRUE}`), or a more sophisticated shell conditional expression. The _message_ is optional, and must be quoted.
A sophisticated shell conditional expression is equivalent to what the __if__ or __while__ shell built-ins would use (more specifically, what the __test__ command would use). Testing for example whether some value is greater than another value can be done this way.
`assertTrue "[ 34 -gt 23 ]"`
Testing for the ability to read a file can also be done. This particular test will fail.
`assertTrue 'test failed' "[ -r /some/non-existant/file ]"`
As the expressions are standard shell __test__ expressions, it is possible to string multiple expressions together with `-a` and `-o` in the standard fashion. This test will succeed as the entire expression evaluates to _true_.
`assertTrue 'test failed' '[ 1 -eq 1 -a 2 -eq 2 ]'`
_One word of warning: be very careful with your quoting as shell is not the most forgiving of bad quoting, and things will fail in strange ways._
`assertFalse [message] condition`
Asserts that a given shell test _condition_ is _false_. The condition can be as simple as a shell _false_ value (the value `1` -- equivalent to `${SHUNIT_FALSE}`), or a more sophisticated shell conditional expression. The _message_ is optional, and must be quoted.
_For examples of more sophisticated expressions, see `assertTrue`._
### <a name="failures"></a> Failures
Just to clarify, failures __do not__ test the various arguments against one another. Failures simply fail, optionally with a message, and that is all they do. If you need to test arguments against one another, use asserts.
If all failures do is fail, why might one use them? There are times when you may have some very complicated logic that you need to test, and the simple asserts provided are simply not adequate. You can do your own validation of the code, use an `assertTrue ${SHUNIT_TRUE}` if your own tests succeeded, and use a failure to record a failure.
`fail [message]`
Fails the test immediately. The _message_ is optional, and must be quoted.
`failNotEquals [message] unexpected actual`
Fails the test immediately, reporting that the _unexpected_ and _actual_ values are not equal to one another. The _message_ is optional, and must be quoted.
_Note: no actual comparison of unexpected and actual is done._
`failSame [message] expected actual`
Fails the test immediately, reporting that the _expected_ and _actual_ values are the same. The _message_ is optional, and must be quoted.
_Note: no actual comparison of expected and actual is done._
`failNotSame [message] expected actual`
Fails the test immediately, reporting that the _expected_ and _actual_ values are not the same. The _message_ is optional, and must be quoted.
_Note: no actual comparison of expected and actual is done._
### <a name="setup-teardown"></a> Setup/Teardown
`oneTimeSetUp`
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called once before any tests are run. It is useful to prepare a common environment for all tests.
`oneTimeTearDown`
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called once after all tests are completed. It is useful to clean up the environment after all tests.
`setUp`
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called before each test is run. It is useful to reset the environment before each test.
`tearDown`
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called after each test completes. It is useful to clean up the environment after each test.
### <a name="skipping"></a> Skipping
`startSkipping`
This function forces the remaining _assert_ and _fail_ functions to be "skipped", i.e. they will have no effect. Each function skipped will be recorded so that the total of asserts and fails will not be altered.
`endSkipping`
This function returns calls to the _assert_ and _fail_ functions to their default behavior, i.e. they will be called.
`isSkipping`
This function returns the current state of skipping. It can be compared against `${SHUNIT_TRUE}` or `${SHUNIT_FALSE}` if desired.
### <a name="suites"></a> Suites
The default behavior of shUnit2 is that all tests will be found dynamically. If you have a specific set of tests you want to run, or you don't want to use the standard naming scheme of prefixing your tests with `test`, these functions are for you. Most users will never use them though.
`suite`
This function can be optionally overridden by the user in their test suite.
If this function exists, it will be called when `shunit2` is sourced. If it does not exist, shUnit2 will search the parent script for all functions beginning with the word `test`, and they will be added dynamically to the test suite.
`suite_addTest name`
This function adds a function named _name_ to the list of tests scheduled for execution as part of this test suite. This function should only be called from within the `suite()` function.
---
## <a name="advanced-usage"></a> Advanced Usage
### <a name="some-constants-you-can-use"></a> Some constants you can use
There are several constants provided by shUnit2 as variables that might be of use to you.
*Predefined*
| Constant | Value |
| --------------- | ----- |
| SHUNIT\_TRUE | Standard shell `true` value (the integer value 0). |
| SHUNIT\_FALSE | Standard shell `false` value (the integer value 1). |
| SHUNIT\_ERROR | The integer value 2. |
| SHUNIT\_TMPDIR | Path to temporary directory that will be automatically cleaned up upon exit of shUnit2. |
| SHUNIT\_VERSION | The version of shUnit2 you are running. |
*User defined*
| Constant | Value |
| ----------------- | ----- |
| SHUNIT\_CMD\_EXPR | Override which `expr` command is used. By default `expr` is used, except on BSD systems where `gexpr` is used. |
| SHUNIT\_COLOR | Enable colorized output. Options are 'auto', 'always', or 'never', with 'auto' being the default. |
| SHUNIT\_PARENT | The filename of the shell script containing the tests. This is needed specifically for Zsh support. |
| SHUNIT\_TEST\_PREFIX | Define this variable to add a prefix in front of each test name that is output in the test report. |
### <a name="error-handling"></a> Error handling
The constants values `SHUNIT_TRUE`, `SHUNIT_FALSE`, and `SHUNIT_ERROR` are returned from nearly every function to indicate the success or failure of the function. Additionally the variable `flags_error` is filled with a detailed error message if any function returns with a `SHUNIT_ERROR` value.
### <a name="including-line-numbers-in-asserts-macros"></a> Including Line Numbers in Asserts (Macros)
If you include lots of assert statements in an individual test function, it can become difficult to determine exactly which assert was thrown unless your messages are unique. To help somewhat, line numbers can be included in the assert messages. To enable this, a special shell "macro" must be used rather than the standard assert calls. _Shell doesn't actually have macros; the name is used here as the operation is similar to a standard macro._
For example, to include line numbers for a `assertEquals()` function call, replace the `assertEquals()` with `${_ASSERT_EQUALS_}`.
_**Example** -- Asserts with and without line numbers_
```sh
#! /bin/sh
# file: examples/lineno_test.sh
testLineNo() {
# This assert will have line numbers included (e.g. "ASSERT:[123] ...").
echo "ae: ${_ASSERT_EQUALS_}"
${_ASSERT_EQUALS_} 'not equal' 1 2
# This assert will not have line numbers included (e.g. "ASSERT: ...").
assertEquals 'not equal' 1 2
}
# Load shUnit2.
. ./shunit2
```
Notes:
1. Due to how shell parses command-line arguments, all strings used with macros should be quoted twice. Namely, single-quotes must be converted to single-double-quotes, and vice-versa. If the string being passed is absolutely for sure not empty, the extra quoting is not necessary.<br/><br/>Normal `assertEquals` call.<br/>`assertEquals 'some message' 'x' ''`<br/><br/>Macro `_ASSERT_EQUALS_` call. Note the extra quoting around the _message_ and the _null_ value.<br/>`_ASSERT_EQUALS_ '"some message"' 'x' '""'`
1. Line numbers are not supported in all shells. If a shell does not support them, no errors will be thrown. Supported shells include: __bash__ (>=3.0), __ksh__, __pdksh__, and __zsh__.
### <a name="test-skipping"></a> Test Skipping
There are times where the test code you have written is just not applicable to the system you are running on. This section describes how to skip these tests but maintain the total test count.
Probably the easiest example would be shell code that is meant to run under the __bash__ shell, but the unit test is running under the Bourne shell. There are things that just won't work. The following test code demonstrates two sample functions, one that will be run under any shell, and another that will run only under the __bash__ shell.
_**Example** -- math include_
```sh
# file: examples/math.inc.
add_generic() {
num_a=$1
num_b=$2
expr $1 + $2
}
add_bash() {
num_a=$1
num_b=$2
echo $(($1 + $2))
}
```
And here is a corresponding unit test that correctly skips the `add_bash()` function when the unit test is not running under the __bash__ shell.
_**Example** -- math unit test_
```sh
#! /bin/sh
# file: examples/math_test.sh
testAdding() {
result=`add_generic 1 2`
assertEquals \
"the result of '${result}' was wrong" \
3 "${result}"
# Disable non-generic tests.
[ -z "${BASH_VERSION:-}" ] && startSkipping
result=`add_bash 1 2`
assertEquals \
"the result of '${result}' was wrong" \
3 "${result}"
}
oneTimeSetUp() {
# Load include to test.
. ./math.inc
}
# Load and run shUnit2.
. ./shunit2
```
Running the above test under the __bash__ shell will result in the following output.
```console
$ /bin/bash math_test.sh
testAdding
Ran 1 test.
OK
```
But, running the test under any other Unix shell will result in the following output.
```console
$ /bin/ksh math_test.sh
testAdding
Ran 1 test.
OK (skipped=1)
```
As you can see, the total number of tests has not changed, but the report indicates that some tests were skipped.
Skipping can be controlled with the following functions: `startSkipping()`, `endSkipping()`, and `isSkipping()`. Once skipping is enabled, it will remain enabled until the end of the current test function call, after which skipping is disabled.
---
## <a name="appendix"></a> Appendix
### <a name="getting-help"></a> Getting Help
For help, please send requests to either the shunit2-users@googlegroups.com mailing list (archives available on the web at http://groups.google.com/group/shunit2-users) or directly to Kate Ward <kate dot ward at forestent dot com>.
### <a name="zsh"></a> Zsh
For compatibility with Zsh, there is one requirement that must be met -- the `shwordsplit` option must be set. There are three ways to accomplish this.
1. In the unit-test script, add the following shell code snippet before sourcing the `shunit2` library.
```sh
setopt shwordsplit
```
1. When invoking __zsh__ from either the command-line or as a script with `#!`, add the `-y` parameter.
```sh
#! /bin/zsh -y
```
1. When invoking __zsh__ from the command-line, add `-o shwordsplit --` as parameters before the script name.
```console
$ zsh -o shwordsplit -- some_script
```
[log4sh]: https://github.com/kward/log4sh
[shflags]: https://github.com/kward/shflags

View File

@ -0,0 +1,88 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# This script runs the provided unit tests and sends the output to the
# appropriate file.
#
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# Source following.
# shellcheck disable=SC1090,SC1091
# FLAGS variables are dynamically created.
# shellcheck disable=SC2154
# Disagree with [ p ] && [ q ] vs [ p -a -q ] recommendation.
# shellcheck disable=SC2166
# Treat unset variables as an error.
set -u
# Print an optional error message to stderr, then abort the script with
# exit status 1. With no arguments, exits silently.
die() {
  if [ $# -gt 0 ]; then
    echo "error: $*" >&2
  fi
  exit 1
}
# Locate this script so the bundled libraries can be loaded with paths
# relative to it, regardless of the caller's working directory.
BASE_DIR=$(dirname "$0")
LIB_DIR="${BASE_DIR}/lib"

### Load libraries.
. "${LIB_DIR}/shflags" || die 'unable to load shflags library'
. "${LIB_DIR}/shlib" || die 'unable to load shlib library'
. "${LIB_DIR}/versions" || die 'unable to load versions library'

# Redefining BASE_DIR now that we have the shlib functions. We need BASE_DIR so
# that we can properly load things, even in the event that this script is called
# from a different directory.
BASE_DIR=$(shlib_relToAbsPath "${BASE_DIR}")

# Define flags.
os_name=$(versions_osName |sed 's/ /_/g')
os_version=$(versions_osVersion)
DEFINE_boolean force false 'force overwrite' f
# Default to /tmp when TMPDIR is unset: 'set -u' is in effect above, so a bare
# "${TMPDIR}" would abort the script on systems that do not export TMPDIR.
DEFINE_string output_dir "${TMPDIR:-/tmp}" 'output dir' d
DEFINE_string output_file "${os_name}-${os_version}.txt" 'output file' o
DEFINE_string runner 'test_runner' 'unit test runner' r
DEFINE_boolean dry_run false "suppress logging to a file" n
# Run the configured unit test runner, optionally teeing its output to a file.
# Reads the FLAGS_* globals defined by the shflags declarations above.
main() {
  # Determine output filename.
  # FLAGS_output_dir may be empty; the ':+' expansion only inserts the '/'
  # separator when a directory was actually supplied.
  # shellcheck disable=SC2154
  output="${FLAGS_output_dir:+${FLAGS_output_dir}/}${FLAGS_output_file}"
  output=$(shlib_relToAbsPath "${output}")

  # Checks.
  # Refuse to clobber an existing output file unless --force was given.
  if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" -a -f "${output}" ]; then
    if [ "${FLAGS_force}" -eq "${FLAGS_TRUE}" ]; then
      rm -f "${output}"
    else
      echo "not overwriting '${output}'" >&2
      exit "${FLAGS_ERROR}"
    fi
  fi
  # Fail early if the output location is not writable.
  if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" ]; then
    touch "${output}" 2>/dev/null || die "unable to write to '${output}'"
  fi

  # Run tests.
  # The runner executes in a subshell; unless this is a dry run, its stdout is
  # duplicated into the output file with tee while still shown on the console.
  (
    if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" ]; then
      "./${FLAGS_runner}" |tee "${output}"
    else
      "./${FLAGS_runner}"
    fi
  )

  if [ "${FLAGS_dry_run}" -eq "${FLAGS_FALSE}" ]; then
    echo >&2
    echo "Output written to '${output}'." >&2
  fi
}
# Parse the command line with shflags; propagate any parsing error's status.
FLAGS "$@" || exit $?
# shflags already printed usage when -h/--help was given; nothing more to do.
[ "${FLAGS_help}" -eq "${FLAGS_FALSE}" ] || exit
# Replace the positional parameters with the non-flag arguments.
eval set -- "${FLAGS_ARGV}"
main "${@:-}"

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,39 @@
# vim:et:ft=sh:sts=2:sw=2
#
# Copyright 2008 Kate Ward. All Rights Reserved.
# Released under the LGPL (GNU Lesser General Public License).
#
# Author: kate.ward@forestent.com (Kate Ward)
#
# Library of shell functions.
# Convert a relative path into its absolute equivalent.
#
# The current working directory is prepended when the path is not already
# absolute; parent ('..') and self ('.') references are then collapsed to
# reconstruct a clean absolute path.
#
# Args:
#   shlib_path_: string: relative path
# Outputs:
#   string: absolute path
shlib_relToAbsPath()
{
  shlib_path_=$1

  # Prepend the current directory to relative paths.
  echo "${shlib_path_}" |grep '^/' >/dev/null 2>&1 \
      || shlib_path_="${PWD}/${shlib_path_}"

  # Repeatedly collapse '..' and './' segments until the path stops changing.
  # (A single sed pass only removes one parent reference at a time.)
  shlib_new_=`echo "${shlib_path_}" |sed 's/[^/]*\/\.\.\/*//;s/\/\.\//\//'`
  while [ "${shlib_new_}" != "${shlib_path_}" ]; do
    shlib_path_=${shlib_new_}
    shlib_new_=`echo "${shlib_path_}" |sed 's/[^/]*\/\.\.\/*//;s/\/\.\//\//'`
  done

  echo "${shlib_new_}"
  unset shlib_path_ shlib_new_
}

272
dev/tests/shunit2/lib/versions Executable file
View File

@ -0,0 +1,272 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# Versions determines the versions of all installed shells.
#
# Copyright 2008-2018 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 License.
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shlib
#
# This library provides reusable functions that determine actual names and
# versions of installed shells and the OS. The library can also be run as a
# script if set executable.
#
# Disable checks that aren't fully portable (POSIX != portable).
# shellcheck disable=SC2006
# Basename of the invocation; used to detect direct execution (see bottom).
ARGV0=`basename "$0"`

# LSB release file found on Debian/Ubuntu-style systems.
LSB_RELEASE='/etc/lsb-release'
# Shells whose versions versions_main() will probe.
VERSIONS_SHELLS='ash /bin/bash /bin/dash /bin/ksh /bin/pdksh /bin/zsh /bin/sh /usr/xpg4/bin/sh /sbin/sh'

# Derive TRUE/FALSE from the shell itself so they match command exit statuses.
true; TRUE=$?
false; FALSE=$?
ERROR=2

# Cache uname output; it cannot change during a run.
UNAME_R=`uname -r`
UNAME_S=`uname -s`

# Tri-state cache for _versions_have_strings(); ERROR means "not checked yet".
__versions_haveStrings=${ERROR}
# Determine the name of the host operating system.
#
# Uses the cached uname values (UNAME_S/UNAME_R); on macOS the product
# version is mapped to a marketing name, and on SunOS /etc/release is
# consulted to distinguish Solaris from OpenSolaris.
#
# Outputs:
#   string: OS name ('unrecognized' when the system is unknown)
versions_osName() {
  os_name_='unrecognized'
  os_system_=${UNAME_S}
  os_release_=${UNAME_R}

  case ${os_system_} in
    CYGWIN_NT-*) os_name_='Cygwin' ;;
    Darwin)
      os_name_=`/usr/bin/sw_vers -productName`
      os_version_=`versions_osVersion`
      case ${os_version_} in
        10.4|10.4.[0-9]*) os_name_='Mac OS X Tiger' ;;
        10.5|10.5.[0-9]*) os_name_='Mac OS X Leopard' ;;
        10.6|10.6.[0-9]*) os_name_='Mac OS X Snow Leopard' ;;
        10.7|10.7.[0-9]*) os_name_='Mac OS X Lion' ;;
        10.8|10.8.[0-9]*) os_name_='Mac OS X Mountain Lion' ;;
        10.9|10.9.[0-9]*) os_name_='Mac OS X Mavericks' ;;
        10.10|10.10.[0-9]*) os_name_='Mac OS X Yosemite' ;;
        10.11|10.11.[0-9]*) os_name_='Mac OS X El Capitan' ;;
        10.12|10.12.[0-9]*) os_name_='macOS Sierra' ;;
        10.13|10.13.[0-9]*) os_name_='macOS High Sierra' ;;
        *) os_name_='macOS' ;;
      esac
      ;;
    FreeBSD) os_name_='FreeBSD' ;;
    Linux) os_name_='Linux' ;;
    SunOS)
      os_name_='SunOS'
      if [ -r '/etc/release' ]; then
        if grep 'OpenSolaris' /etc/release >/dev/null; then
          os_name_='OpenSolaris'
        else
          os_name_='Solaris'
        fi
      fi
      ;;
  esac

  # Quote the expansion (SC2086) so multi-word names such as
  # 'Mac OS X Tiger' are emitted verbatim, without word splitting or globbing.
  echo "${os_name_}"
  unset os_name_ os_system_ os_release_ os_version_
}
# Determine the version of the host operating system.
#
# Uses the cached uname values and, on Linux/SunOS, the various distribution
# release files to derive a human-readable version string.
#
# Outputs:
#   string: OS version ('unrecognized' when it cannot be determined)
versions_osVersion() {
  os_version_='unrecognized'
  os_system_=${UNAME_S}
  os_release_=${UNAME_R}

  case ${os_system_} in
    CYGWIN_NT-*)
      # Extract 'major.minor.build' from the front of the release string.
      os_version_=`expr "${os_release_}" : '\([0-9]*\.[0-9]\.[0-9]*\).*'`
      ;;
    Darwin)
      os_version_=`/usr/bin/sw_vers -productVersion`
      ;;
    FreeBSD)
      # Strip the '-RELEASE'/'-STABLE' suffix.
      os_version_=`expr "${os_release_}" : '\([0-9]*\.[0-9]*\)-.*'`
      ;;
    Linux)
      # Try the common distribution release files, most standard first.
      if [ -r '/etc/os-release' ]; then
        os_version_=`awk -F= '$1~/PRETTY_NAME/{print $2}' /etc/os-release \
            |sed 's/"//g'`
      elif [ -r '/etc/redhat-release' ]; then
        os_version_=`cat /etc/redhat-release`
      elif [ -r '/etc/SuSE-release' ]; then
        os_version_=`head -n 1 /etc/SuSE-release`
      elif [ -r "${LSB_RELEASE}" ]; then
        if grep -q 'DISTRIB_ID=Ubuntu' "${LSB_RELEASE}"; then
          # shellcheck disable=SC2002
          os_version_=`cat "${LSB_RELEASE}" \
              |awk -F= '$1~/DISTRIB_DESCRIPTION/{print $2}' \
              |sed 's/"//g;s/ /-/g'`
        fi
      fi
      ;;
    SunOS)
      if [ -r '/etc/release' ]; then
        if grep 'OpenSolaris' /etc/release >/dev/null; then # OpenSolaris
          os_version_=`grep 'OpenSolaris' /etc/release |awk '{print $2"("$3")"}'`
        else # Solaris
          # Combine the uname minor (e.g. '10' from '5.10') with the update
          # tag (e.g. 'u8') from /etc/release.
          major_=`echo "${os_release_}" |sed 's/[0-9]*\.\([0-9]*\)/\1/'`
          minor_=`grep Solaris /etc/release |sed 's/[^u]*\(u[0-9]*\).*/\1/'`
          os_version_="${major_}${minor_}"
        fi
      fi
      ;;
  esac

  echo "${os_version_}"
  unset os_release_ os_system_ os_version_ major_ minor_
}
# Determine the version of a given shell.
#
# Args:
#   $1: string: name ('ash') or path of the shell binary
# Outputs:
#   string: version, or one of 'not installed', 'unknown', 'invalid'
# Returns:
#   FALSE when the shell is not installed
versions_shellVersion() {
  shell_=$1

  # Presence check: 'ash' lives inside BusyBox rather than on its own.
  shell_present_=${FALSE}
  case "${shell_}" in
    ash) [ -x '/bin/busybox' ] && shell_present_=${TRUE} ;;
    *) [ -x "${shell_}" ] && shell_present_=${TRUE} ;;
  esac
  if [ ${shell_present_} -eq ${FALSE} ]; then
    echo 'not installed'
    return ${FALSE}
  fi

  version_=''
  case ${shell_} in
    /sbin/sh) ;; # SunOS
    /usr/xpg4/bin/sh)
      version_=`versions_shell_xpg4 "${shell_}"`
      ;; # SunOS
    */sh)
      # This could be one of any number of shells. Try until one fits.
      version_=''
      [ -z "${version_}" ] && version_=`versions_shell_bash "${shell_}"`
      # dash cannot be self determined yet
      [ -z "${version_}" ] && version_=`versions_shell_ksh "${shell_}"`
      # pdksh is covered in versions_shell_ksh()
      [ -z "${version_}" ] && version_=`versions_shell_xpg4 "${shell_}"`
      [ -z "${version_}" ] && version_=`versions_shell_zsh "${shell_}"`
      ;;
    ash) version_=`versions_shell_ash "${shell_}"` ;;
    */bash) version_=`versions_shell_bash "${shell_}"` ;;
    */dash)
      # Assuming Ubuntu Linux until somebody comes up with a better test. The
      # following test will return an empty string if dash is not installed.
      version_=`versions_shell_dash`
      ;;
    */ksh) version_=`versions_shell_ksh "${shell_}"` ;;
    */pdksh) version_=`versions_shell_pdksh "${shell_}"` ;;
    */zsh) version_=`versions_shell_zsh "${shell_}"` ;;
    *) version_='invalid'
  esac

  echo "${version_:-unknown}"
  unset shell_ version_
}
# The ash shell is included in BusyBox.
# Outputs the BusyBox version number taken from the first line of
# 'busybox --help' (empty output when busybox is absent).
versions_shell_ash() {
  busybox --help |head -1 |sed 's/BusyBox v\([0-9.]*\) .*/\1/'
}
# Determine the version of an installed bash.
#
# Args:
#   $1: string: name or path of the bash binary
# Outputs:
#   string: bash version (e.g. '5.1.16(1)-release'); empty when not bash
versions_shell_bash() {
  # Quote "$1" (SC2086) so binary paths containing spaces are passed intact.
  "$1" --version : 2>&1 |grep 'GNU bash' |sed 's/.*version \([^ ]*\).*/\1/'
}
# Determine the version of an installed dash (Debian/Ubuntu only).
#
# dash cannot report its own version, so the dpkg package database is
# queried instead. Outputs nothing on systems without dpkg or without dash.
versions_shell_dash() {
  # Bail out quietly on non-Debian systems instead of invoking dpkg just to
  # sniff for an exit status of 127 (the old 'eval dpkg' trick).
  command -v dpkg >/dev/null 2>&1 || return  # Return if dpkg not found.
  dpkg -l |grep ' dash ' |awk '{print $3}'
}
# Determine the version of an installed ksh.
#
# Tries '--version' first (ksh93 reports a date-style version), then scans
# the binary with strings(1), and finally falls back to the pdksh probe.
#
# Args:
#   $1: string: path to the ksh binary
# Outputs:
#   string: ksh version; empty when it cannot be determined
versions_shell_ksh() {
  versions_shell_=$1
  versions_version_=''

  # Try a few different ways to figure out the version.
  versions_version_=`${versions_shell_} --version : 2>&1`
  # shellcheck disable=SC2181
  if [ $? -eq 0 ]; then
    # Keep only the 'YYYY-MM-DD' portion of the version banner.
    versions_version_=`echo "${versions_version_}" \
        |sed 's/.*\([0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\).*/\1/'`
  else
    versions_version_=''
  fi
  if [ -z "${versions_version_}" ]; then
    _versions_have_strings
    versions_version_=`strings "${versions_shell_}" 2>&1 \
        |grep Version \
        |sed 's/^.*Version \(.*\)$/\1/;s/ s+ \$$//;s/ /-/g'`
  fi
  if [ -z "${versions_version_}" ]; then
    versions_version_=`versions_shell_pdksh "${versions_shell_}"`
  fi

  echo "${versions_version_}"
  unset versions_shell_ versions_version_
}
# Determine the version of an installed pdksh by scanning the binary for its
# embedded 'PD KSH' version string.
versions_shell_pdksh() {
  _versions_have_strings
  strings "$1" 2>&1 \
      |grep 'PD KSH' \
      |sed -e 's/.*PD KSH \(.*\)/\1/;s/ /-/g'
}
# Determine the version of an XPG4 (POSIX) sh by scanning the binary for its
# embedded '@(#)Version' SCCS string.
versions_shell_xpg4() {
  _versions_have_strings
  strings "$1" 2>&1 \
      |grep 'Version' \
      |sed -e 's/^@(#)Version //'
}
# Determine the version of an installed zsh.
#
# Args:
#   $1: string: path to the zsh binary
# Outputs:
#   string: zsh version; empty when it cannot be determined
versions_shell_zsh() {
  versions_shell_=$1

  # Try a few different ways to figure out the version.
  # Ask the shell itself to expand its ZSH_VERSION variable.
  # shellcheck disable=SC2016
  versions_version_=`echo 'echo ${ZSH_VERSION}' |${versions_shell_}`
  if [ -z "${versions_version_}" ]; then
    # Fall back to the '--version' banner; second field is the version.
    versions_version_=`${versions_shell_} --version : 2>&1`
    # shellcheck disable=SC2181
    if [ $? -eq 0 ]; then
      versions_version_=`echo "${versions_version_}" |awk '{print $2}'`
    else
      versions_version_=''
    fi
  fi

  echo "${versions_version_}"
  unset versions_shell_ versions_version_
}
# Determine if the 'strings' binary is installed, caching the answer in
# __versions_haveStrings (TRUE/FALSE; ERROR means "not checked yet").
_versions_have_strings() {
  # Already determined on a previous call.
  [ ${__versions_haveStrings} -ne ${ERROR} ] && return
  # NOTE(review): the 'eval' presumably shields against command-not-found
  # handling quirks in legacy shells -- confirm before simplifying.
  if eval strings /dev/null >/dev/null 2>&1; then
    __versions_haveStrings=${TRUE}
    return
  fi
  echo 'WARN: strings not installed. try installing binutils?' >&2
  __versions_haveStrings=${FALSE}
}
# Entry point when this library is executed directly: report the OS
# name/version and the version of each shell in VERSIONS_SHELLS.
versions_main() {
  # Treat unset variables as an error.
  set -u

  os_name=`versions_osName`
  os_version=`versions_osVersion`
  echo "os: ${os_name} version: ${os_version}"

  for shell in ${VERSIONS_SHELLS}; do
    shell_version=`versions_shellVersion "${shell}"`
    echo "shell: ${shell} version: ${shell_version}"
  done
}
# Run versions_main() only when this file is executed directly as 'versions'
# (not when sourced as a library).
if [ "${ARGV0}" = 'versions' ]; then
  versions_main "$@"
fi

File diff suppressed because it is too large Load Diff

View File

@ -1,23 +1,25 @@
#! /bin/sh #! /bin/sh
# $Id$
# vim:et:ft=sh:sts=2:sw=2 # vim:et:ft=sh:sts=2:sw=2
# #
# Copyright 2008 Kate Ward. All Rights Reserved. # shunit2 unit test for assert functions.
# Released under the LGPL (GNU Lesser General Public License) #
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
# #
# Author: kate.ward@forestent.com (Kate Ward) # Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
# #
# shUnit2 unit test for assert functions # Disable source following.
# shellcheck disable=SC1090,SC1091
# load test helpers # These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"
# Load test helpers.
. ./shunit2_test_helpers . ./shunit2_test_helpers
#------------------------------------------------------------------------------ commonEqualsSame() {
# suite tests
#
commonEqualsSame()
{
fn=$1 fn=$1
( ${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${fn} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
@ -42,8 +44,7 @@ commonEqualsSame()
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
commonNotEqualsSame() commonNotEqualsSame() {
{
fn=$1 fn=$1
( ${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${fn} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
@ -65,28 +66,23 @@ commonNotEqualsSame()
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
testAssertEquals() testAssertEquals() {
{
commonEqualsSame 'assertEquals' commonEqualsSame 'assertEquals'
} }
testAssertNotEquals() testAssertNotEquals() {
{
commonNotEqualsSame 'assertNotEquals' commonNotEqualsSame 'assertNotEquals'
} }
testAssertSame() testAssertSame() {
{
commonEqualsSame 'assertSame' commonEqualsSame 'assertSame'
} }
testAssertNotSame() testAssertNotSame() {
{
commonNotEqualsSame 'assertNotSame' commonNotEqualsSame 'assertNotSame'
} }
testAssertNull() testAssertNull() {
{
( assertNull '' >"${stdoutF}" 2>"${stderrF}" ) ( assertNull '' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'null' $? "${stdoutF}" "${stderrF}" th_assertTrueWithNoOutput 'null' $? "${stdoutF}" "${stderrF}"
@ -119,6 +115,7 @@ testAssertNotNull()
th_assertTrueWithNoOutput 'not null, with single-quote' $? \ th_assertTrueWithNoOutput 'not null, with single-quote' $? \
"${stdoutF}" "${stderrF}" "${stdoutF}" "${stderrF}"
# shellcheck disable=SC2016
( assertNotNull 'x$b' >"${stdoutF}" 2>"${stderrF}" ) ( assertNotNull 'x$b' >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'not null, with dollar' $? \ th_assertTrueWithNoOutput 'not null, with dollar' $? \
"${stdoutF}" "${stderrF}" "${stdoutF}" "${stderrF}"
@ -130,14 +127,13 @@ testAssertNotNull()
( assertNotNull '' >"${stdoutF}" 2>"${stderrF}" ) ( assertNotNull '' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'null' $? "${stdoutF}" "${stderrF}" th_assertFalseWithOutput 'null' $? "${stdoutF}" "${stderrF}"
# there is no test for too few arguments as $1 might actually be null # There is no test for too few arguments as $1 might actually be null.
( assertNotNull arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" ) ( assertNotNull arg1 arg2 arg3 >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
testAssertTrue() testAssertTrue() {
{
( assertTrue 0 >"${stdoutF}" 2>"${stderrF}" ) ( assertTrue 0 >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'true' $? "${stdoutF}" "${stderrF}" th_assertTrueWithNoOutput 'true' $? "${stdoutF}" "${stderrF}"
@ -163,8 +159,7 @@ testAssertTrue()
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
testAssertFalse() testAssertFalse() {
{
( assertFalse 1 >"${stdoutF}" 2>"${stderrF}" ) ( assertFalse 1 >"${stdoutF}" 2>"${stderrF}" )
th_assertTrueWithNoOutput 'false' $? "${stdoutF}" "${stderrF}" th_assertTrueWithNoOutput 'false' $? "${stdoutF}" "${stderrF}"
@ -190,17 +185,13 @@ testAssertFalse()
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
#------------------------------------------------------------------------------ oneTimeSetUp() {
# suite functions
#
oneTimeSetUp()
{
th_oneTimeSetUp th_oneTimeSetUp
MSG='This is a test message' MSG='This is a test message'
} }
# load and run shUnit2 # Load and run shunit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0 [ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. ${TH_SHUNIT} . "${TH_SHUNIT}"

View File

@ -1,23 +1,25 @@
#! /bin/sh #! /bin/sh
# $Id$
# vim:et:ft=sh:sts=2:sw=2 # vim:et:ft=sh:sts=2:sw=2
# #
# Copyright 2008 Kate Ward. All Rights Reserved. # shUnit2 unit test for failure functions
#
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Released under the LGPL (GNU Lesser General Public License) # Released under the LGPL (GNU Lesser General Public License)
# #
# Author: kate.ward@forestent.com (Kate Ward) # Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
# #
# shUnit2 unit test for failure functions # Disable source following.
# shellcheck disable=SC1090,SC1091
# load common unit-test functions # These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"
# Load test helpers.
. ./shunit2_test_helpers . ./shunit2_test_helpers
#----------------------------------------------------------------------------- testFail() {
# suite tests
#
testFail()
{
( fail >"${stdoutF}" 2>"${stderrF}" ) ( fail >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'fail' $? "${stdoutF}" "${stderrF}" th_assertFalseWithOutput 'fail' $? "${stdoutF}" "${stderrF}"
@ -28,8 +30,7 @@ testFail()
th_assertFalseWithOutput 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithOutput 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
testFailNotEquals() testFailNotEquals() {
{
( failNotEquals 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( failNotEquals 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}" th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
@ -49,8 +50,7 @@ testFailNotEquals()
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
testFailSame() testFailSame() {
{
( failSame 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( failSame 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}" th_assertFalseWithOutput 'same' $? "${stdoutF}" "${stderrF}"
@ -70,17 +70,13 @@ testFailSame()
th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}" th_assertFalseWithError 'too many arguments' $? "${stdoutF}" "${stderrF}"
} }
#----------------------------------------------------------------------------- oneTimeSetUp() {
# suite functions
#
oneTimeSetUp()
{
th_oneTimeSetUp th_oneTimeSetUp
MSG='This is a test message' MSG='This is a test message'
} }
# load and run shUnit2 # Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0 [ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. ${TH_SHUNIT} . "${TH_SHUNIT}"

View File

@ -1,108 +1,110 @@
#! /bin/sh #! /bin/sh
# $Id$
# vim:et:ft=sh:sts=2:sw=2 # vim:et:ft=sh:sts=2:sw=2
# #
# Copyright 2008 Kate Ward. All Rights Reserved. # shunit2 unit test for macros.
# Released under the LGPL (GNU Lesser General Public License)
# Author: kate.ward@forestent.com (Kate Ward)
# #
# shUnit2 unit test for macros. # Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
### ShellCheck http://www.shellcheck.net/
# Disable source following.
# shellcheck disable=SC1090,SC1091
# Presence of LINENO variable is checked.
# shellcheck disable=SC2039
# load test helpers # These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"
# Load test helpers.
. ./shunit2_test_helpers . ./shunit2_test_helpers
#------------------------------------------------------------------------------ testAssertEquals() {
# suite tests # Start skipping if LINENO not available.
#
testAssertEquals()
{
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_ASSERT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_EQUALS_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_EQUALS_ failure' ${rtrn} assertTrue '_ASSERT_EQUALS_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_EQUALS_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_EQUALS_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testAssertNotEquals() testAssertNotEquals() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_ASSERT_NOT_EQUALS_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_NOT_EQUALS_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NOT_EQUALS_ failure' ${rtrn} assertTrue '_ASSERT_NOT_EQUALS_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_NOT_EQUALS_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_NOT_EQUALS_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NOT_EQUALS_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_NOT_EQUALS_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testSame() testSame() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_ASSERT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_SAME_ failure' ${rtrn} assertTrue '_ASSERT_SAME_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_SAME_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_SAME_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testNotSame() testNotSame() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_ASSERT_NOT_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_NOT_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NOT_SAME_ failure' ${rtrn} assertTrue '_ASSERT_NOT_SAME_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_NOT_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_NOT_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NOT_SAME_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_NOT_SAME_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testNull() testNull() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_ASSERT_NULL_} 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_NULL_} 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NULL_ failure' ${rtrn} assertTrue '_ASSERT_NULL_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_NULL_} '"some msg"' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_NULL_} '"some msg"' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NULL_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_NULL_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testNotNull() testNotNull()
@ -114,68 +116,64 @@ testNotNull()
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NOT_NULL_ failure' ${rtrn} assertTrue '_ASSERT_NOT_NULL_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_NOT_NULL_} '"some msg"' '""' >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_NOT_NULL_} '"some msg"' '""' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_NOT_NULL_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_NOT_NULL_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stdoutF}" "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stdoutF}" "${stderrF}" >&2
} }
testAssertTrue() testAssertTrue() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_ASSERT_TRUE_} ${SHUNIT_FALSE} >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_TRUE_} "${SHUNIT_FALSE}" >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_TRUE_ failure' ${rtrn} assertTrue '_ASSERT_TRUE_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_TRUE_} '"some msg"' "${SHUNIT_FALSE}" >"${stdoutF}" 2>"${stderrF}" )
( ${_ASSERT_TRUE_} '"some msg"' ${SHUNIT_FALSE} >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_TRUE_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_TRUE_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testAssertFalse() testAssertFalse() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_ASSERT_FALSE_} ${SHUNIT_TRUE} >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_FALSE_} "${SHUNIT_TRUE}" >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_FALSE_ failure' ${rtrn} assertTrue '_ASSERT_FALSE_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_ASSERT_FALSE_} '"some msg"' ${SHUNIT_TRUE} >"${stdoutF}" 2>"${stderrF}" ) ( ${_ASSERT_FALSE_} '"some msg"' "${SHUNIT_TRUE}" >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_ASSERT_FALSE_ w/ msg failure' ${rtrn} assertTrue '_ASSERT_FALSE_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testFail() testFail() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_FAIL_} >"${stdoutF}" 2>"${stderrF}" ) ( ${_FAIL_} >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_ failure' ${rtrn} assertTrue '_FAIL_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_FAIL_} '"some msg"' >"${stdoutF}" 2>"${stderrF}" ) ( ${_FAIL_} '"some msg"' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_ w/ msg failure' ${rtrn} assertTrue '_FAIL_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testFailNotEquals() testFailNotEquals()
@ -187,60 +185,57 @@ testFailNotEquals()
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_NOT_EQUALS_ failure' ${rtrn} assertTrue '_FAIL_NOT_EQUALS_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_FAIL_NOT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${_FAIL_NOT_EQUALS_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_NOT_EQUALS_ w/ msg failure' ${rtrn} assertTrue '_FAIL_NOT_EQUALS_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testFailSame() testFailSame() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_FAIL_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_FAIL_SAME_} 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_SAME_ failure' ${rtrn} assertTrue '_FAIL_SAME_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_FAIL_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" ) ( ${_FAIL_SAME_} '"some msg"' 'x' 'x' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_SAME_ w/ msg failure' ${rtrn} assertTrue '_FAIL_SAME_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
testFailNotSame() testFailNotSame() {
{ # Start skipping if LINENO not available.
# start skipping if LINENO not available
[ -z "${LINENO:-}" ] && startSkipping [ -z "${LINENO:-}" ] && startSkipping
( ${_FAIL_NOT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${_FAIL_NOT_SAME_} 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_NOT_SAME_ failure' ${rtrn} assertTrue '_FAIL_NOT_SAME_ failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
( ${_FAIL_NOT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" ) ( ${_FAIL_NOT_SAME_} '"some msg"' 'x' 'y' >"${stdoutF}" 2>"${stderrF}" )
grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null grep '^ASSERT:\[[0-9]*\] *' "${stdoutF}" >/dev/null
rtrn=$? rtrn=$?
assertTrue '_FAIL_NOT_SAME_ w/ msg failure' ${rtrn} assertTrue '_FAIL_NOT_SAME_ w/ msg failure' ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2 [ "${rtrn}" -ne "${SHUNIT_TRUE}" ] && cat "${stderrF}" >&2
} }
#------------------------------------------------------------------------------ oneTimeSetUp() {
# suite functions
#
oneTimeSetUp()
{
th_oneTimeSetUp th_oneTimeSetUp
} }
# load and run shUnit2 # Disable output coloring as it breaks the tests.
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0 SHUNIT_COLOR='none'; export SHUNIT_COLOR
. ${TH_SHUNIT}
# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT="$0"
. "${TH_SHUNIT}"

View File

@ -0,0 +1,262 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# shUnit2 unit tests of miscellaneous things
#
# Copyright 2008-2018 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
### ShellCheck http://www.shellcheck.net/
# $() are not fully portable (POSIX != portable).
# shellcheck disable=SC2006
# Disable source following.
# shellcheck disable=SC1090,SC1091
# Not wanting to escape single quotes.
# shellcheck disable=SC1003
# These variables will be overridden by the test helpers.
stdoutF="${TMPDIR:-/tmp}/STDOUT"
stderrF="${TMPDIR:-/tmp}/STDERR"

# Load test helpers.
. ./shunit2_test_helpers
# Note: the test script is prefixed with '#' chars so that shUnit2 does not
# incorrectly interpret the embedded functions as real functions.

# Verify that referencing an unbound variable under 'set -u' inside a child
# test run is reported as a failure rather than silently killing shUnit2.
testUnboundVariable() {
  unittestF="${SHUNIT_TMPDIR}/unittest"
  # The sed call strips the leading '#' from each line when writing the file.
  sed 's/^#//' >"${unittestF}" <<EOF
## Treat unset variables as an error when performing parameter expansion.
#set -u
#
#boom() { x=\$1; } # This function goes boom if no parameters are passed!
#test_boom() {
# assertEquals 1 1
# boom # No parameter given
# assertEquals 0 \$?
#}
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
  ( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
  assertFalse 'expected a non-zero exit value' $?
  grep '^ASSERT:Unknown failure' "${stdoutF}" >/dev/null
  assertTrue 'assert message was not generated' $?
  grep '^Ran [0-9]* test' "${stdoutF}" >/dev/null
  assertTrue 'test count message was not generated' $?
  grep '^FAILED' "${stdoutF}" >/dev/null
  assertTrue 'failure message was not generated' $?
}
# assertEquals repeats message argument.
# https://github.com/kward/shunit2/issues/7
testIssue7() {
# Disable coloring so 'ASSERT:' lines can be matched correctly.
_shunit_configureColor 'none'
( assertEquals 'Some message.' 1 2 >"${stdoutF}" 2>"${stderrF}" )
# The assert must print the message exactly once, in exactly this form.
diff "${stdoutF}" - >/dev/null <<EOF
ASSERT:Some message. expected:<1> but was:<2>
EOF
rtrn=$?
assertEquals "${SHUNIT_TRUE}" "${rtrn}"
# On mismatch, dump captured stderr to aid debugging.
[ "${rtrn}" -eq "${SHUNIT_TRUE}" ] || cat "${stderrF}" >&2
}
# Support prefixes on test output.
# https://github.com/kward/shunit2/issues/29
testIssue29() {
unittestF="${SHUNIT_TMPDIR}/unittest"
# Embedded script: '#' prefixes are stripped by sed so the function below is
# not picked up as a test of this suite.
sed 's/^#//' >"${unittestF}" <<EOF
## Support test prefixes.
#test_assert() { assertTrue ${SHUNIT_TRUE}; }
#SHUNIT_COLOR='none'
#SHUNIT_TEST_PREFIX='--- '
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
# The configured prefix must appear before the test name in the output.
grep '^--- test_assert' "${stdoutF}" >/dev/null
rtrn=$?
assertEquals "${SHUNIT_TRUE}" "${rtrn}"
[ "${rtrn}" -eq "${SHUNIT_TRUE}" ] || cat "${stdoutF}" >&2
}
# shUnit2 should not exit with 0 when it has syntax errors.
# https://github.com/kward/shunit2/issues/69
testIssue69() {
unittestF="${SHUNIT_TMPDIR}/unittest"
# Exercise every assert flavor with no arguments at all.
for t in Equals NotEquals Null NotNull Same NotSame True False; do
assert="assert${t}"
sed 's/^#//' >"${unittestF}" <<EOF
## Asserts with invalid argument counts should be counted as failures.
#test_assert() { ${assert}; }
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
# The run must end with a FAILED summary, not report success.
grep '^FAILED' "${stdoutF}" >/dev/null
assertTrue "failure message for ${assert} was not generated" $?
done
}
# Ensure that test fails if setup/teardown functions fail.
testIssue77() {
unittestF="${SHUNIT_TMPDIR}/unittest"
# A non-zero return from any fixture function must fail the whole run.
for func in oneTimeSetUp setUp tearDown oneTimeTearDown; do
sed 's/^#//' >"${unittestF}" <<EOF
## Environment failure should end test.
#${func}() { return ${SHUNIT_FALSE}; }
#test_true() { assertTrue ${SHUNIT_TRUE}; }
#SHUNIT_COLOR='none'
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
grep '^FAILED' "${stdoutF}" >/dev/null
assertTrue "failure of ${func}() did not end test" $?
done
}
# Ensure a test failure is recorded for code containing syntax errors.
# https://github.com/kward/shunit2/issues/84
testIssue84() {
unittestF="${SHUNIT_TMPDIR}/unittest"
# Quoted heredoc delimiter (<<\EOF) keeps the intentionally broken code
# below literal -- no expansion happens while writing the script.
sed 's/^#//' >"${unittestF}" <<\EOF
## Function with syntax error.
#syntax_error() { ${!#3442} -334 a$@2[1]; }
#test_syntax_error() {
# syntax_error
# assertTrue ${SHUNIT_TRUE}
#}
#SHUNIT_COLOR='none'
#SHUNIT_TEST_PREFIX='--- '
#. ${TH_SHUNIT}
EOF
( exec "${SHUNIT_SHELL:-sh}" "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
grep '^FAILED' "${stdoutF}" >/dev/null
# Fix: the message previously interpolated ${assert}, a variable set only
# inside testIssue69's loop; here it expanded to empty (or stale) text.
assertTrue 'failure message for syntax error was not generated' $?
}
# _shunit_prepForSourcing() must return a path usable with '.': absolute and
# explicitly relative paths pass through unchanged; bare names gain './'.
testPrepForSourcing() {
tpfs_result=`_shunit_prepForSourcing '/abc'`
assertEquals '/abc' "${tpfs_result}"
tpfs_result=`_shunit_prepForSourcing './abc'`
assertEquals './abc' "${tpfs_result}"
tpfs_result=`_shunit_prepForSourcing 'abc'`
assertEquals './abc' "${tpfs_result}"
unset tpfs_result
}
# Table-driven check of _shunit_escapeCharInStr(): each data row holds a
# description, the character to escape, the input string, and the expected
# escaped output. The heredoc delimiter is quoted ('EOF') so rows are literal.
testEscapeCharInStr() {
while read -r desc char str want; do
got=`_shunit_escapeCharInStr "${char}" "${str}"`
assertEquals "${desc}" "${want}" "${got}"
done <<'EOF'
backslash \ '' ''
backslash_pre \ \def \\def
backslash_mid \ abc\def abc\\def
backslash_post \ abc\ abc\\
quote " '' ''
quote_pre " "def \"def
quote_mid " abc"def abc\"def
quote_post " abc" abc\"
string $ '' ''
string_pre $ $def \$def
string_mid $ abc$def abc\$def
string_post $ abc$ abc\$
EOF
# Single-quote and backtick escaping are currently unverified upstream.
# TODO(20170924:kward) fix or remove.
# actual=`_shunit_escapeCharInStr "'" ''`
# assertEquals '' "${actual}"
# assertEquals "abc\\'" `_shunit_escapeCharInStr "'" "abc'"`
# assertEquals "abc\\'def" `_shunit_escapeCharInStr "'" "abc'def"`
# assertEquals "\\'def" `_shunit_escapeCharInStr "'" "'def"`
# # Must put the backtick in a variable so the shell doesn't misinterpret it
# # while inside a backticked sequence (e.g. `echo '`'` would fail).
# backtick='`'
# actual=`_shunit_escapeCharInStr ${backtick} ''`
# assertEquals '' "${actual}"
# assertEquals '\`abc' \
# `_shunit_escapeCharInStr "${backtick}" ${backtick}'abc'`
# assertEquals 'abc\`' \
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}`
# assertEquals 'abc\`def' \
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}'def'`
}
# Make sure our forward slash doesn't upset sed.
testEscapeCharInStr_specialChars() {
tecis_got=`_shunit_escapeCharInStr '\' '/'`
assertEquals '/' "${tecis_got}"
unset tecis_got
# Some shells escape these differently.
# TODO(20170924:kward) fix or remove.
#assertEquals '\\a' `_shunit_escapeCharInStr '\' '\a'`
#assertEquals '\\b' `_shunit_escapeCharInStr '\' '\b'`
}
# Test the various ways of declaring functions.
#
# Prefixing (then stripping) with comment symbol so these functions aren't
# treated as real functions by shUnit2.
testExtractTestFunctions() {
f="${SHUNIT_TMPDIR}/extract_test_functions"
# Sample file covering the supported declaration styles; '#' prefixes are
# stripped by sed so these do not register as real test functions here.
sed 's/^#//' <<EOF >"${f}"
## Function on a single line.
#testABC() { echo 'ABC'; }
## Multi-line function with '{' on next line.
#test_def()
# {
# echo 'def'
#}
## Multi-line function with '{' on first line.
#testG3 () {
# echo 'G3'
#}
## Function with numerical values in name.
#function test4() { echo '4'; }
## Leading space in front of function.
# test5() { echo '5'; }
## Function with '_' chars in name.
#some_test_function() { echo 'some func'; }
## Function that sets variables.
#func_with_test_vars() {
# testVariable=1234
#}
EOF
actual=`_shunit_extractTestFunctions "${f}"`
# Only names starting with 'test' qualify; some_test_function and
# func_with_test_vars must be excluded from the extracted list.
assertEquals 'testABC test_def testG3 test4 test5' "${actual}"
}
# Test that certain external commands sometimes "stubbed" by users
# are escaped. See Issue #54.
testProtectedCommands() {
# Each stub-prone external must be invoked via 'command <name>' in shunit2.
for c in mkdir rm cat chmod; do
grep "^[^#]*${c} " "${TH_SHUNIT}" | grep -qv "command ${c}"
assertFalse "external call to ${c} not protected somewhere" $?
done
# '[' and '.' are commonly overridden too; they need the same guard.
grep '^[^#]*[^ ] *\[' "${TH_SHUNIT}" | grep -qv 'command \['
assertFalse "call to [ ... ] not protected somewhere" $?
grep '^[^#]* *\.' "${TH_SHUNIT}" | grep -qv 'command \.'
assertFalse "call to . not protected somewhere" $?
}
# Runs before each test: empty the capture files and restore color mode.
setUp() {
# Truncate stdout/stderr capture files left over from the previous test.
cp /dev/null "${stdoutF}"
cp /dev/null "${stderrF}"
# Some tests (e.g. testIssue7) reconfigure coloring; reset the default.
_shunit_configureColor "${SHUNIT_COLOR_DEFAULT}"
}
oneTimeSetUp() {
# Remember the starting color mode so setUp() can restore it after tests
# that reconfigure coloring; then delegate shared setup to the helper.
SHUNIT_COLOR_DEFAULT="${SHUNIT_COLOR}"
th_oneTimeSetUp
}
# Load and run shUnit2.
# shellcheck disable=SC2034
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. "${TH_SHUNIT}"

View File

@ -1,41 +1,38 @@
#! /bin/sh #! /bin/sh
# $Id$
# vim:et:ft=sh:sts=2:sw=2 # vim:et:ft=sh:sts=2:sw=2
# #
# Copyright 2010 Kate Ward. All Rights Reserved.
# Released under the LGPL (GNU Lesser General Public License)
# Author: kate.ward@forestent.com (Kate Ward)
#
# shUnit2 unit test for standalone operation. # shUnit2 unit test for standalone operation.
# #
# Copyright 2010-2017 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
#
# This unit test is purely to test that calling shunit2 directly, while passing # This unit test is purely to test that calling shunit2 directly, while passing
# the name of a unit test script, works. When run, this script determines if it # the name of a unit test script, works. When run, this script determines if it
# is running as a standalone program, and calls main() if it is. # is running as a standalone program, and calls main() if it is.
#
### ShellCheck http://www.shellcheck.net/
# $() are not fully portable (POSIX != portable).
# shellcheck disable=SC2006
# Disable source following.
# shellcheck disable=SC1090,SC1091
ARGV0=`basename "$0"` ARGV0="`basename "$0"`"
# load test helpers # Load test helpers.
. ./shunit2_test_helpers . ./shunit2_test_helpers
#------------------------------------------------------------------------------ testStandalone() {
# suite tests assertTrue "${SHUNIT_TRUE}"
#
testStandalone()
{
assertTrue ${SHUNIT_TRUE}
} }
#------------------------------------------------------------------------------ main() {
# main
#
main()
{
${TH_SHUNIT} "${ARGV0}" ${TH_SHUNIT} "${ARGV0}"
} }
# are we running as a standalone? # Are we running as a standalone?
if [ "${ARGV0}" = 'shunit2_test_standalone.sh' ]; then if [ "${ARGV0}" = 'shunit2_test_standalone.sh' ]; then
if [ $# -gt 0 ]; then main "$@"; else main; fi if [ $# -gt 0 ]; then main "$@"; else main; fi
fi fi

View File

@ -1,124 +0,0 @@
#! /bin/sh
# $Id$
# vim:et:ft=sh:sts=2:sw=2
#
# Copyright 2008 Kate Ward. All Rights Reserved.
# Released under the LGPL (GNU Lesser General Public License)
# Author: kate.ward@forestent.com (Kate Ward)
#
# shUnit2 unit test suite runner.
#
# This script runs all the unit tests that can be found, and generates a nice
# report of the tests.
MY_NAME=`basename $0`
MY_PATH=`dirname $0`
PREFIX='shunit2_test_'
SHELLS='/bin/sh /bin/bash /bin/dash /bin/ksh /bin/pdksh /bin/zsh'
TESTS=''
for test in ${PREFIX}[a-z]*.sh; do
TESTS="${TESTS} ${test}"
done
# load common unit test functions
. ../lib/versions
. ./shunit2_test_helpers
usage()
{
echo "usage: ${MY_NAME} [-e key=val ...] [-s shell(s)] [-t test(s)]"
}
env=''
# process command line flags
while getopts 'e:hs:t:' opt; do
case ${opt} in
e) # set an environment variable
key=`expr "${OPTARG}" : '\([^=]*\)='`
val=`expr "${OPTARG}" : '[^=]*=\(.*\)'`
if [ -z "${key}" -o -z "${val}" ]; then
usage
exit 1
fi
eval "${key}='${val}'"
export ${key}
env="${env:+${env} }${key}"
;;
h) usage; exit 0 ;; # output help
s) shells=${OPTARG} ;; # list of shells to run
t) tests=${OPTARG} ;; # list of tests to run
*) usage; exit 1 ;;
esac
done
shift `expr ${OPTIND} - 1`
# fill shells and/or tests
shells=${shells:-${SHELLS}}
tests=${tests:-${TESTS}}
# error checking
if [ -z "${tests}" ]; then
th_error 'no tests found to run; exiting'
exit 1
fi
cat <<EOF
#------------------------------------------------------------------------------
# System data
#
# test run info
shells: ${shells}
tests: ${tests}
EOF
for key in ${env}; do
eval "echo \"${key}=\$${key}\""
done
echo
# output system data
echo "# system info"
echo "$ date"
date
echo
echo "$ uname -mprsv"
uname -mprsv
#
# run tests
#
for shell in ${shells}; do
echo
# check for existance of shell
if [ ! -x ${shell} ]; then
th_warn "unable to run tests with the ${shell} shell"
continue
fi
cat <<EOF
#------------------------------------------------------------------------------
# Running the test suite with ${shell}
#
EOF
SHUNIT_SHELL=${shell} # pass shell onto tests
shell_name=`basename ${shell}`
shell_version=`versions_shellVersion "${shell}"`
echo "shell name: ${shell_name}"
echo "shell version: ${shell_version}"
# execute the tests
for suite in ${tests}; do
suiteName=`expr "${suite}" : "${PREFIX}\(.*\).sh"`
echo
echo "--- Executing the '${suiteName}' test suite ---"
( exec ${shell} ./${suite} 2>&1; )
done
done

View File

@ -1,104 +1,112 @@
# $Id$
# vim:et:ft=sh:sts=2:sw=2 # vim:et:ft=sh:sts=2:sw=2
# #
# shUnit2 unit test common functions
#
# Copyright 2008 Kate Ward. All Rights Reserved. # Copyright 2008 Kate Ward. All Rights Reserved.
# Released under the LGPL (GNU Lesser General Public License) # Released under the Apache 2.0 license.
# #
# Author: kate.ward@forestent.com (Kate Ward) # Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shunit2
# #
# shUnit2 unit test common functions ### ShellCheck (http://www.shellcheck.net/)
# Commands are purposely escaped so they can be mocked outside shUnit2.
# shellcheck disable=SC1001,SC1012
# expr may be antiquated, but it is the only solution in some cases.
# shellcheck disable=SC2003
# $() are not fully portable (POSIX != portable).
# shellcheck disable=SC2006
# treat unset variables as an error when performing parameter expansion # Treat unset variables as an error when performing parameter expansion.
set -u set -u
# set shwordsplit for zsh # Set shwordsplit for zsh.
[ -n "${ZSH_VERSION:-}" ] && setopt shwordsplit \[ -n "${ZSH_VERSION:-}" ] && setopt shwordsplit
# #
# constants # Constants.
# #
# path to shUnit2 library. can be overridden by setting SHUNIT_INC # Path to shUnit2 library. Can be overridden by setting SHUNIT_INC.
TH_SHUNIT=${SHUNIT_INC:-./shunit2} TH_SHUNIT=${SHUNIT_INC:-./shunit2}; export TH_SHUNIT
# configure debugging. set the DEBUG environment variable to any # Configure debugging. Set the DEBUG environment variable to any
# non-empty value to enable debug output, or TRACE to enable trace # non-empty value to enable debug output, or TRACE to enable trace
# output. # output.
TRACE=${TRACE:+'th_trace '} TRACE=${TRACE:+'th_trace '}
[ -n "${TRACE}" ] && DEBUG=1 \[ -n "${TRACE}" ] && DEBUG=1
[ -z "${TRACE}" ] && TRACE=':' \[ -z "${TRACE}" ] && TRACE=':'
DEBUG=${DEBUG:+'th_debug '} DEBUG=${DEBUG:+'th_debug '}
[ -z "${DEBUG}" ] && DEBUG=':' \[ -z "${DEBUG}" ] && DEBUG=':'
# #
# variables # Variables.
# #
th_RANDOM=0 th_RANDOM=0
# #
# functions # Functions.
# #
# message functions # Logging functions.
th_trace() { echo "${MY_NAME}:TRACE $@" >&2; } th_trace() { echo "${MY_NAME}:TRACE $*" >&2; }
th_debug() { echo "${MY_NAME}:DEBUG $@" >&2; } th_debug() { echo "${MY_NAME}:DEBUG $*" >&2; }
th_info() { echo "${MY_NAME}:INFO $@" >&2; } th_info() { echo "${MY_NAME}:INFO $*" >&2; }
th_warn() { echo "${MY_NAME}:WARN $@" >&2; } th_warn() { echo "${MY_NAME}:WARN $*" >&2; }
th_error() { echo "${MY_NAME}:ERROR $@" >&2; } th_error() { echo "${MY_NAME}:ERROR $*" >&2; }
th_fatal() { echo "${MY_NAME}:FATAL $@" >&2; } th_fatal() { echo "${MY_NAME}:FATAL $*" >&2; }
# output subtest name # Output subtest name.
th_subtest() { echo " $@" >&2; } th_subtest() { echo " $*" >&2; }
th_oneTimeSetUp() th_oneTimeSetUp() {
{ # These files will be cleaned up automatically by shUnit2.
# these files will be cleaned up automatically by shUnit2
stdoutF="${SHUNIT_TMPDIR}/stdout" stdoutF="${SHUNIT_TMPDIR}/stdout"
stderrF="${SHUNIT_TMPDIR}/stderr" stderrF="${SHUNIT_TMPDIR}/stderr"
returnF="${SHUNIT_TMPDIR}/return" returnF="${SHUNIT_TMPDIR}/return"
expectedF="${SHUNIT_TMPDIR}/expected" expectedF="${SHUNIT_TMPDIR}/expected"
export stdoutF stderrF returnF expectedF
} }
# generate a random number # Generate a random number.
th_generateRandom() th_generateRandom() {
{
tfgr_random=${th_RANDOM} tfgr_random=${th_RANDOM}
while [ "${tfgr_random}" = "${th_RANDOM}" ]; do while \[ "${tfgr_random}" = "${th_RANDOM}" ]; do
if [ -n "${RANDOM:-}" ]; then # shellcheck disable=SC2039
if \[ -n "${RANDOM:-}" ]; then
# $RANDOM works # $RANDOM works
# shellcheck disable=SC2039
tfgr_random=${RANDOM}${RANDOM}${RANDOM}$$ tfgr_random=${RANDOM}${RANDOM}${RANDOM}$$
elif [ -r '/dev/urandom' ]; then elif \[ -r '/dev/urandom' ]; then
tfgr_random=`od -vAn -N4 -tu4 </dev/urandom |sed 's/^[^0-9]*//'` tfgr_random=`od -vAn -N4 -tu4 </dev/urandom |sed 's/^[^0-9]*//'`
else else
tfgr_date=`date '+%H%M%S'` tfgr_date=`date '+%H%M%S'`
tfgr_random=`expr ${tfgr_date} \* $$` tfgr_random=`expr "${tfgr_date}" \* $$`
unset tfgr_date unset tfgr_date
fi fi
[ "${tfgr_random}" = "${th_RANDOM}" ] && sleep 1 \[ "${tfgr_random}" = "${th_RANDOM}" ] && sleep 1
done done
th_RANDOM=${tfgr_random} th_RANDOM=${tfgr_random}
unset tfgr_random unset tfgr_random
} }
# this section returns the data section from the specified section of a file. a # This section returns the data section from the specified section of a file. A
# data section is defined by a [header], one or more lines of data, and then a # data section is defined by a [header], one or more lines of data, and then a
# blank line. # blank line.
th_getDataSect() th_getDataSect() {
{
th_sgrep "\\[$1\\]" "$2" |sed '1d' th_sgrep "\\[$1\\]" "$2" |sed '1d'
} }
# this function greps a section from a file. a section is defined as a group of # This function greps a section from a file. A section is defined as a group of
# lines preceeded and followed by blank lines. # lines preceded and followed by blank lines.
th_sgrep() th_sgrep() {
{
th_pattern_=$1 th_pattern_=$1
shift shift
# shellcheck disable=SC2068
sed -e '/./{H;$!d;}' -e "x;/${th_pattern_}/"'!d;' $@ |sed '1d' sed -e '/./{H;$!d;}' -e "x;/${th_pattern_}/"'!d;' $@ |sed '1d'
unset th_pattern_ unset th_pattern_
@ -113,15 +121,14 @@ th_sgrep()
# th_rtrn_: integer: the return value of the subtest performed # th_rtrn_: integer: the return value of the subtest performed
# th_stdout_: string: filename where stdout was redirected to # th_stdout_: string: filename where stdout was redirected to
# th_stderr_: string: filename where stderr was redirected to # th_stderr_: string: filename where stderr was redirected to
th_assertTrueWithNoOutput() th_assertTrueWithNoOutput() {
{
th_test_=$1 th_test_=$1
th_rtrn_=$2 th_rtrn_=$2
th_stdout_=$3 th_stdout_=$3
th_stderr_=$4 th_stderr_=$4
assertTrue "${th_test_}; expected return value of zero" ${th_rtrn_} assertTrue "${th_test_}; expected return value of zero" "${th_rtrn_}"
[ ${th_rtrn_} -ne ${SHUNIT_TRUE} ] && cat "${th_stderr_}" \[ "${th_rtrn_}" -ne "${SHUNIT_TRUE}" ] && \cat "${th_stderr_}"
assertFalse "${th_test_}; expected no output to STDOUT" \ assertFalse "${th_test_}; expected no output to STDOUT" \
"[ -s '${th_stdout_}' ]" "[ -s '${th_stdout_}' ]"
assertFalse "${th_test_}; expected no output to STDERR" \ assertFalse "${th_test_}; expected no output to STDERR" \
@ -145,13 +152,13 @@ th_assertFalseWithOutput()
th_stdout_=$3 th_stdout_=$3
th_stderr_=$4 th_stderr_=$4
assertFalse "${th_test_}; expected non-zero return value" ${th_rtrn_} assertFalse "${th_test_}; expected non-zero return value" "${th_rtrn_}"
assertTrue "${th_test_}; expected output to STDOUT" \ assertTrue "${th_test_}; expected output to STDOUT" \
"[ -s '${th_stdout_}' ]" "[ -s '${th_stdout_}' ]"
assertFalse "${th_test_}; expected no output to STDERR" \ assertFalse "${th_test_}; expected no output to STDERR" \
"[ -s '${th_stderr_}' ]" "[ -s '${th_stderr_}' ]"
[ -s "${th_stdout_}" -a ! -s "${th_stderr_}" ] || \ \[ -s "${th_stdout_}" -a ! -s "${th_stderr_}" ] || \
_th_showOutput ${SHUNIT_FALSE} "${th_stdout_}" "${th_stderr_}" _th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
unset th_test_ th_rtrn_ th_stdout_ th_stderr_ unset th_test_ th_rtrn_ th_stdout_ th_stderr_
} }
@ -164,20 +171,19 @@ th_assertFalseWithOutput()
# th_rtrn_: integer: the return value of the subtest performed # th_rtrn_: integer: the return value of the subtest performed
# th_stdout_: string: filename where stdout was redirected to # th_stdout_: string: filename where stdout was redirected to
# th_stderr_: string: filename where stderr was redirected to # th_stderr_: string: filename where stderr was redirected to
th_assertFalseWithError() th_assertFalseWithError() {
{
th_test_=$1 th_test_=$1
th_rtrn_=$2 th_rtrn_=$2
th_stdout_=$3 th_stdout_=$3
th_stderr_=$4 th_stderr_=$4
assertFalse "${th_test_}; expected non-zero return value" ${th_rtrn_} assertFalse "${th_test_}; expected non-zero return value" "${th_rtrn_}"
assertFalse "${th_test_}; expected no output to STDOUT" \ assertFalse "${th_test_}; expected no output to STDOUT" \
"[ -s '${th_stdout_}' ]" "[ -s '${th_stdout_}' ]"
assertTrue "${th_test_}; expected output to STDERR" \ assertTrue "${th_test_}; expected output to STDERR" \
"[ -s '${th_stderr_}' ]" "[ -s '${th_stderr_}' ]"
[ ! -s "${th_stdout_}" -a -s "${th_stderr_}" ] || \ \[ ! -s "${th_stdout_}" -a -s "${th_stderr_}" ] || \
_th_showOutput ${SHUNIT_FALSE} "${th_stdout_}" "${th_stderr_}" _th_showOutput "${SHUNIT_FALSE}" "${th_stdout_}" "${th_stderr_}"
unset th_test_ th_rtrn_ th_stdout_ th_stderr_ unset th_test_ th_rtrn_ th_stdout_ th_stderr_
} }
@ -186,34 +192,33 @@ th_assertFalseWithError()
# when a non-zero return value is encountered. To properly catch these values, # when a non-zero return value is encountered. To properly catch these values,
# they are either written to disk, or recognized as an error the file is empty. # they are either written to disk, or recognized as an error the file is empty.
th_clearReturn() { cp /dev/null "${returnF}"; } th_clearReturn() { cp /dev/null "${returnF}"; }
th_queryReturn() th_queryReturn() {
{ if \[ -s "${returnF}" ]; then
if [ -s "${returnF}" ]; then th_return=`\cat "${returnF}"`
th_return=`cat "${returnF}"`
else else
th_return=${SHUNIT_ERROR} th_return=${SHUNIT_ERROR}
fi fi
export th_return
} }
# Providing external and internal calls to the showOutput helper function. # Providing external and internal calls to the showOutput helper function.
th_showOutput() { _th_showOutput $@; } th_showOutput() { _th_showOutput "$@"; }
_th_showOutput() _th_showOutput() {
{
_th_return_=$1 _th_return_=$1
_th_stdout_=$2 _th_stdout_=$2
_th_stderr_=$3 _th_stderr_=$3
isSkipping isSkipping
if [ $? -eq ${SHUNIT_FALSE} -a ${_th_return_} != ${SHUNIT_TRUE} ]; then if \[ $? -eq "${SHUNIT_FALSE}" -a "${_th_return_}" != "${SHUNIT_TRUE}" ]; then
if [ -n "${_th_stdout_}" -a -s "${_th_stdout_}" ]; then if \[ -n "${_th_stdout_}" -a -s "${_th_stdout_}" ]; then
echo '>>> STDOUT' >&2 echo '>>> STDOUT' >&2
cat "${_th_stdout_}" >&2 \cat "${_th_stdout_}" >&2
fi fi
if [ -n "${_th_stderr_}" -a -s "${_th_stderr_}" ]; then if \[ -n "${_th_stderr_}" -a -s "${_th_stderr_}" ]; then
echo '>>> STDERR' >&2 echo '>>> STDERR' >&2
cat "${_th_stderr_}" >&2 \cat "${_th_stderr_}" >&2
fi fi
if [ -n "${_th_stdout_}" -o -n "${_th_stderr_}" ]; then if \[ -n "${_th_stdout_}" -o -n "${_th_stderr_}" ]; then
echo '<<< end output' >&2 echo '<<< end output' >&2
fi fi
fi fi
@ -222,7 +227,7 @@ _th_showOutput()
} }
# #
# main # Main.
# #
${TRACE} 'trace output enabled' ${TRACE} 'trace output enabled'

View File

@ -1,160 +0,0 @@
#! /bin/sh
# $Id$
# vim:et:ft=sh:sts=2:sw=2
#
# Copyright 2008 Kate Ward. All Rights Reserved.
# Released under the LGPL (GNU Lesser General Public License)
#
# Author: kate.ward@forestent.com (Kate Ward)
#
# shUnit2 unit tests of miscellaneous things
# load test helpers
. ./shunit2_test_helpers
#------------------------------------------------------------------------------
# suite tests
#
# Note: the test script is prefixed with '#' chars so that shUnit2 does not
# incorrectly interpret the embedded functions as real functions.
testUnboundVariable()
{
unittestF="${SHUNIT_TMPDIR}/unittest"
sed 's/^#//' >"${unittestF}" <<EOF
## treat unset variables as an error when performing parameter expansion
#set -u
#
#boom() { x=\$1; } # this function goes boom if no parameters are passed!
#test_boom()
#{
# assertEquals 1 1
# boom # No parameter given
# assertEquals 0 \$?
#}
#. ${TH_SHUNIT}
EOF
( exec ${SHUNIT_SHELL:-sh} "${unittestF}" >"${stdoutF}" 2>"${stderrF}" )
assertFalse 'expected a non-zero exit value' $?
grep '^ASSERT:Unknown failure' "${stdoutF}" >/dev/null
assertTrue 'assert message was not generated' $?
grep '^Ran [0-9]* test' "${stdoutF}" >/dev/null
assertTrue 'test count message was not generated' $?
grep '^FAILED' "${stdoutF}" >/dev/null
assertTrue 'failure message was not generated' $?
}
testIssue7()
{
( assertEquals 'Some message.' 1 2 >"${stdoutF}" 2>"${stderrF}" )
diff "${stdoutF}" - >/dev/null <<EOF
ASSERT:Some message. expected:<1> but was:<2>
EOF
rtrn=$?
assertEquals ${SHUNIT_TRUE} ${rtrn}
[ ${rtrn} -ne ${SHUNIT_TRUE} ] && cat "${stderrF}" >&2
}
testPrepForSourcing()
{
assertEquals '/abc' `_shunit_prepForSourcing '/abc'`
assertEquals './abc' `_shunit_prepForSourcing './abc'`
assertEquals './abc' `_shunit_prepForSourcing 'abc'`
}
testEscapeCharInStr()
{
actual=`_shunit_escapeCharInStr '\' ''`
assertEquals '' "${actual}"
assertEquals 'abc\\' `_shunit_escapeCharInStr '\' 'abc\'`
assertEquals 'abc\\def' `_shunit_escapeCharInStr '\' 'abc\def'`
assertEquals '\\def' `_shunit_escapeCharInStr '\' '\def'`
actual=`_shunit_escapeCharInStr '"' ''`
assertEquals '' "${actual}"
assertEquals 'abc\"' `_shunit_escapeCharInStr '"' 'abc"'`
assertEquals 'abc\"def' `_shunit_escapeCharInStr '"' 'abc"def'`
assertEquals '\"def' `_shunit_escapeCharInStr '"' '"def'`
actual=`_shunit_escapeCharInStr '$' ''`
assertEquals '' "${actual}"
assertEquals 'abc\$' `_shunit_escapeCharInStr '$' 'abc$'`
assertEquals 'abc\$def' `_shunit_escapeCharInStr '$' 'abc$def'`
assertEquals '\$def' `_shunit_escapeCharInStr '$' '$def'`
# actual=`_shunit_escapeCharInStr "'" ''`
# assertEquals '' "${actual}"
# assertEquals "abc\\'" `_shunit_escapeCharInStr "'" "abc'"`
# assertEquals "abc\\'def" `_shunit_escapeCharInStr "'" "abc'def"`
# assertEquals "\\'def" `_shunit_escapeCharInStr "'" "'def"`
# # must put the backtick in a variable so the shell doesn't misinterpret it
# # while inside a backticked sequence (e.g. `echo '`'` would fail).
# backtick='`'
# actual=`_shunit_escapeCharInStr ${backtick} ''`
# assertEquals '' "${actual}"
# assertEquals '\`abc' \
# `_shunit_escapeCharInStr "${backtick}" ${backtick}'abc'`
# assertEquals 'abc\`' \
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}`
# assertEquals 'abc\`def' \
# `_shunit_escapeCharInStr "${backtick}" 'abc'${backtick}'def'`
}
testEscapeCharInStr_specialChars()
{
# make sure our forward slash doesn't upset sed
assertEquals '/' `_shunit_escapeCharInStr '\' '/'`
# some shells escape these differently
#assertEquals '\\a' `_shunit_escapeCharInStr '\' '\a'`
#assertEquals '\\b' `_shunit_escapeCharInStr '\' '\b'`
}
# Test the various ways of declaring functions.
#
# Prefixing (then stripping) with comment symbol so these functions aren't
# treated as real functions by shUnit2.
testExtractTestFunctions()
{
f="${SHUNIT_TMPDIR}/extract_test_functions"
sed 's/^#//' <<EOF >"${f}"
#testABC() { echo 'ABC'; }
#test_def() {
# echo 'def'
#}
#testG3 ()
#{
# echo 'G3'
#}
#function test4() { echo '4'; }
# test5() { echo '5'; }
#some_test_function() { echo 'some func'; }
#func_with_test_vars() {
# testVariable=1234
#}
EOF
actual=`_shunit_extractTestFunctions "${f}"`
assertEquals 'testABC test_def testG3 test4 test5' "${actual}"
}
#------------------------------------------------------------------------------
# suite functions
#
setUp()
{
for f in ${expectedF} ${stdoutF} ${stderrF}; do
cp /dev/null ${f}
done
}
oneTimeSetUp()
{
th_oneTimeSetUp
}
# load and run shUnit2
[ -n "${ZSH_VERSION:-}" ] && SHUNIT_PARENT=$0
. ${TH_SHUNIT}

165
dev/tests/shunit2/test_runner Executable file
View File

@ -0,0 +1,165 @@
#! /bin/sh
# vim:et:ft=sh:sts=2:sw=2
#
# Unit test suite runner.
#
# Copyright 2008-2017 Kate Ward. All Rights Reserved.
# Released under the Apache 2.0 license.
#
# Author: kate.ward@forestent.com (Kate Ward)
# https://github.com/kward/shlib
#
# This script runs all the unit tests that can be found, and generates a nice
# report of the tests.
#
### ShellCheck (http://www.shellcheck.net/)
# Disable source following.
# shellcheck disable=SC1090,SC1091
# expr may be antiquated, but it is the only solution in some cases.
# shellcheck disable=SC2003
# $() are not fully portable (POSIX != portable).
# shellcheck disable=SC2006
# Return if test_runner already loaded.
[ -z "${RUNNER_LOADED:-}" ] || return 0
RUNNER_LOADED=0
RUNNER_ARGV0=`basename "$0"`
RUNNER_SHELLS='/bin/sh ash /bin/bash /bin/dash /bin/ksh /bin/pdksh /bin/zsh'
RUNNER_TEST_SUFFIX='_test.sh'
runner_warn() { echo "runner:WARN $*" >&2; }
runner_error() { echo "runner:ERROR $*" >&2; }
runner_fatal() { echo "runner:FATAL $*" >&2; exit 1; }
runner_usage() {
echo "usage: ${RUNNER_ARGV0} [-e key=val ...] [-s shell(s)] [-t test(s)]"
}
_runner_tests() { echo ./*${RUNNER_TEST_SUFFIX} |sed 's#./##g'; }
# Map a test filename to its suite name by stripping RUNNER_TEST_SUFFIX;
# prints 'unknown' when the filename does not carry the suffix.
_runner_testName() {
# shellcheck disable=SC1117
_runner_testName_=`expr "${1:-}" : "\(.*\)${RUNNER_TEST_SUFFIX}"`
case "${_runner_testName_}" in
'') echo 'unknown' ;;
*) echo "${_runner_testName_}" ;;
esac
unset _runner_testName_
}
main() {
# Find and load versions library.
for _runner_dir_ in . ${LIB_DIR:-lib}; do
if [ -r "${_runner_dir_}/versions" ]; then
_runner_lib_dir_="${_runner_dir_}"
break
fi
done
[ -n "${_runner_lib_dir_}" ] || runner_fatal 'Unable to find versions library.'
. "${_runner_lib_dir_}/versions" || runner_fatal 'Unable to load versions library.'
unset _runner_dir_ _runner_lib_dir_
# Process command line flags.
env=''
while getopts 'e:hs:t:' opt; do
case ${opt} in
e) # set an environment variable
key=`expr "${OPTARG}" : '\([^=]*\)='`
val=`expr "${OPTARG}" : '[^=]*=\(.*\)'`
# shellcheck disable=SC2166
if [ -z "${key}" -o -z "${val}" ]; then
runner_usage
exit 1
fi
eval "${key}='${val}'"
eval "export ${key}"
env="${env:+${env} }${key}"
;;
h) runner_usage; exit 0 ;; # help output
s) shells=${OPTARG} ;; # list of shells to run
t) tests=${OPTARG} ;; # list of tests to run
*) runner_usage; exit 1 ;;
esac
done
shift "`expr ${OPTIND} - 1`"
# Fill shells and/or tests.
shells=${shells:-${RUNNER_SHELLS}}
[ -z "${tests}" ] && tests=`_runner_tests`
# Error checking.
if [ -z "${tests}" ]; then
runner_error 'no tests found to run; exiting'
exit 1
fi
cat <<EOF
#------------------------------------------------------------------------------
# System data.
#
$ uname -mprsv
`uname -mprsv`
OS Name: `versions_osName`
OS Version: `versions_osVersion`
### Test run info.
shells: ${shells}
tests: ${tests}
EOF
for key in ${env}; do
eval "echo \"${key}=\$${key}\""
done
# Run tests.
for shell in ${shells}; do
echo
cat <<EOF
#------------------------------------------------------------------------------
# Running the test suite with ${shell}.
#
EOF
# Check for existence of shell.
shell_bin=${shell}
shell_name=''
shell_present=${FALSE}
case ${shell} in
ash)
shell_bin=`which busybox |grep -v '^no busybox'`
[ $? -eq "${TRUE}" -a -n "${shell_bin}" ] && shell_present="${TRUE}"
shell_bin="${shell_bin} ash"
shell_name=${shell}
;;
*)
[ -x "${shell_bin}" ] && shell_present="${TRUE}"
shell_name=`basename "${shell}"`
;;
esac
if [ "${shell_present}" -eq "${FALSE}" ]; then
runner_warn "unable to run tests with the ${shell_name} shell"
continue
fi
shell_version=`versions_shellVersion "${shell}"`
echo "shell name: ${shell_name}"
echo "shell version: ${shell_version}"
# Execute the tests.
for t in ${tests}; do
echo
echo "--- Executing the '`_runner_testName "${t}"`' test suite. ---"
# ${shell_bin} needs word splitting.
# shellcheck disable=SC2086
( exec ${shell_bin} "./${t}" 2>&1; )
done
done
}
# Execute main() if this is run in standalone mode (i.e. not from a unit test).
[ -z "${SHUNIT_VERSION}" ] && main "$@"

View File

@ -1,10 +1,10 @@
#!/usr/bin/env bash #!/usr/bin/env bash
###### obackup - Local or Remote, push or pull backup script for files & mysql databases ###### obackup - Local or Remote, push or pull backup script for files & mysql databases
###### (C) 2013-2016 by Orsiris de Jong (www.netpower.fr) ###### (C) 2013-2019 by Orsiris de Jong (www.netpower.fr)
###### obackup v2.1x config file rev 2017010201
###### GENERAL BACKUP OPTIONS [GENERAL]
CONFIG_FILE_REVISION=2.1
## Backup identification string. ## Backup identification string.
INSTANCE_ID="test-backup" INSTANCE_ID="test-backup"
@ -13,14 +13,14 @@ INSTANCE_ID="test-backup"
LOGFILE="" LOGFILE=""
## Elements to backup ## Elements to backup
SQL_BACKUP=yes SQL_BACKUP=true
FILE_BACKUP=yes FILE_BACKUP=true
## Backups can be done local, pulled from another server or pushed to a backup server. Available options are [local,pull,push]. ## Backups can be done local, pulled from another server or pushed to a backup server. Available options are [local,pull,push].
## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server. ## Pulled backups are the safest option, as the backup server contains the RSA key and cannot be compromised by another server.
BACKUP_TYPE=local BACKUP_TYPE=local
###### BACKUP STORAGE [BACKUP STORAGE]
## Storage paths of the backups (absolute paths of the local or remote system). Please use ${HOME} instead of ~ if needed. ## Storage paths of the backups (absolute paths of the local or remote system). Please use ${HOME} instead of ~ if needed.
SQL_STORAGE="/home/storage/backup/sql" SQL_STORAGE="/home/storage/backup/sql"
@ -29,7 +29,7 @@ FILE_STORAGE="/home/storage/backup/files"
## Backup encryption using GPG and rsync. ## Backup encryption using GPG and rsync.
## Push backups get encrypted locally in CRYPT_STORAGE before they are sent to the remote system ## Push backups get encrypted locally in CRYPT_STORAGE before they are sent to the remote system
## Local and pull backups get encrypted after backup, in CRYPT_STORAGE ## Local and pull backups get encrypted after backup, in CRYPT_STORAGE
ENCRYPTION=no ENCRYPTION=false
## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system) ## Backup encryption needs a temporary storage space in order to encrypt files before sending them (absolute paths of the local or remote system)
## In case of a pull backup, an encrypted copy of FILE_BACKUP goes here ## In case of a pull backup, an encrypted copy of FILE_BACKUP goes here
@ -38,28 +38,28 @@ CRYPT_STORAGE=/home/storage/backup/crpyt
## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys ## GPG recipient (pubkey for this recipient must exist, see gpg2 --list-keys or gpg --list-keys
GPG_RECIPIENT="John Doe" GPG_RECIPIENT="John Doe"
## Use n CPUs for encryption / decryption where n is an integer ## Use n CPUs for encryption / decryption where n is an integer. Defaults to 1
PARALLEL_ENCRYPTION_PROCESSES= PARALLEL_ENCRYPTION_PROCESSES=
## Create backup directories if they do not exist ## Create backup directories if they do not exist
CREATE_DIRS=yes CREATE_DIRS=true
## Keep absolute source path in your backup, eg: /your/backup/storage/the/remote/server/files ## Keep absolute source path in your backup, eg: /your/backup/storage/the/remote/server/files
## You should leave this enabled if you intend to use 'backup task division' functionality of oBackup, or everything will end up in the same directory. ## You should leave this enabled if you intend to use 'backup task division' functionality of oBackup, or everything will end up in the same directory.
KEEP_ABSOLUTE_PATHS=yes KEEP_ABSOLUTE_PATHS=true
## Generate an alert if backup size is lower than given value in Kb (this can also help identifying empty mount dirs). ## Generate an alert if backup size is lower than given value in Kb (this can also help identifying empty mount dirs).
BACKUP_SIZE_MINIMUM=1024 BACKUP_SIZE_MINIMUM=1024
## Check backup size before proceeding ## Check backup size before proceeding
GET_BACKUP_SIZE=yes GET_BACKUP_SIZE=true
## Generate an alert if storage free space is lower than given value in Kb. ## Generate an alert if storage free space is lower than given value in Kb.
## Keep in mind that disabling backup file size test will only test min space against SQL backup size. ## Keep in mind that disabling backup file size test will only test min space against SQL backup size.
SQL_WARN_MIN_SPACE=1048576 SQL_WARN_MIN_SPACE=1048576
FILE_WARN_MIN_SPACE=1048576 FILE_WARN_MIN_SPACE=1048576
###### REMOTE ONLY OPTIONS [REMOTE_OPTIONS]
## In case of pulled or pushed backups, remote system URI needs to be supplied. ## In case of pulled or pushed backups, remote system URI needs to be supplied.
REMOTE_SYSTEM_URI="ssh://backupuser@remote.system.tld:22/" REMOTE_SYSTEM_URI="ssh://backupuser@remote.system.tld:22/"
@ -70,35 +70,40 @@ SSH_RSA_PRIVATE_KEY="${HOME}/.ssh/id_rsa"
## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed. ## Alternatively, you may specify an SSH password file (less secure). Needs sshpass utility installed.
SSH_PASSWORD_FILE="" SSH_PASSWORD_FILE=""
## When using ssh filter, you must specify a remote token matching the one setup in authorized_keys
_REMOTE_TOKEN=SomeAlphaNumericToken9
## ssh compression should be used unless your remote connection is good enough (LAN) ## ssh compression should be used unless your remote connection is good enough (LAN)
SSH_COMPRESSION=yes SSH_COMPRESSION=true
## Ignore ssh known hosts verification. DANGER WILL ROBINSON DANGER: This can lead to security risks. Only enable if you know what you're doing. ## Ignore ssh known hosts verification. DANGER WILL ROBINSON DANGER: This can lead to security risks. Only enable if you know what you're doing.
## Works on Redhat / CentOS, doesn't work on Debian / Ubunutu ## Works on Redhat / CentOS, doesn't work on Debian / Ubunutu
SSH_IGNORE_KNOWN_HOSTS=no SSH_IGNORE_KNOWN_HOSTS=false
## Remote rsync executable path. Leave this empty in most cases ## Remote rsync executable path. Leave this empty in most cases
RSYNC_REMOTE_PATH="" RSYNC_REMOTE_PATH=""
## Check for connectivity to remote host before launching remote backup tasks. Be sure the hosts responds to ping. Failing to ping will skip current task. ## Check for connectivity to remote host before launching remote backup tasks. Be sure the hosts responds to ping. Failing to ping will skip current task.
REMOTE_HOST_PING=yes REMOTE_HOST_PING=true
## Check for internet access by pinging one or more 3rd party hosts before remote backup tasks. Leave empty if you don't want this check to be be performed. Failing to ping will skip current task. ## Check for internet access by pinging one or more 3rd party hosts before remote backup tasks. Leave empty if you don't want this check to be be performed. Failing to ping will skip current task.
REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com" REMOTE_3RD_PARTY_HOSTS="www.kernel.org www.google.com"
## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled. ## If enabled, commands will be executed as superuser on remote side. See documentation for /etc/sudoers configuration ("find", "du", "tee" and "rsync" need to be allowed). Requiretty needs to be disabled.
SUDO_EXEC=no SUDO_EXEC=false
###### DATABASE SPECIFIC OPTIONS [DATABASE BACKUP SETTINGS]
## Database backup user (should be the same you are running obackup with) ## Database backup user (should be the same you are running obackup with)
SQL_USER=root SQL_USER=root
## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list. ## Enabling the following option will save all databases on local or remote given SQL instance except the ones specified in the exclude list.
## Every found database will be backed up as separate backup task. ## Every found database will be backed up as separate backup task.
DATABASES_ALL=yes DATABASES_ALL=true
DATABASES_ALL_EXCLUDE_LIST="test;mysql" DATABASES_ALL_EXCLUDE_LIST="test;mysql"
## Alternatively, if DATABASES_ALL=no, you can specify a list of databases to backup separated by semi-colons. DATABASES_LIST=""
## Alternatively, if DATABASES_ALL=false, you can specify a list of databases to backup separated by semi-colons.
#DATABASES_LIST="somedatabase" #DATABASES_LIST="somedatabase"
## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task. ## Max backup execution time per Database task. Soft max exec time generates a warning only. Hard max exec time generates a warning and stops current backup task.
@ -115,12 +120,13 @@ MYSQLDUMP_OPTIONS="--opt --single-transaction"
## If you use encryption, compression will only bring small benefits as GPG already has pretty good compression included ## If you use encryption, compression will only bring small benefits as GPG already has pretty good compression included
COMPRESSION_LEVEL=3 COMPRESSION_LEVEL=3
###### FILES SPECIFIC OPTIONS [FILE BACKUP SETTINGS]
## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task. ## File backups are divided in tasks. Every directory in DIRECTORY_LIST will be processed as a unique task.
## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task. ## Every subdirectory of each directory in RECURSIVE_DIRECTORY_LIST will be processed as a unique task.
## Example: RECURSIVE_DIRECTORY_LIST="/home;/var" will create backup tasks tasks "/home/dir1, "/home/dir2", ... "/home/dirN", "/var/log", "/var/lib"... "/var/something". ## Example: RECURSIVE_DIRECTORY_LIST="/home;/var" will create backup tasks tasks "/home/dir1, "/home/dir2", ... "/home/dirN", "/var/log", "/var/lib"... "/var/something".
## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST. ## You can exclude directories from the avove backup task creation, ex: avoid backing up "/home/dir2" by adding it to RECURSIVE_EXCLUDE_LIST.
## Note that since we recurse only by one level, excluding /home/dir2/somedir won't have any effect.
## Please use ${HOME} instead of ~ if needed. ## Please use ${HOME} instead of ~ if needed.
## Directories backup list. List of semicolon separated directories that will be backed up. ## Directories backup list. List of semicolon separated directories that will be backed up.
@ -132,10 +138,10 @@ RECURSIVE_EXCLUDE_LIST="/home/backupuser;/home/lost+found"
RSYNC_PATTERN_FIRST=include RSYNC_PATTERN_FIRST=include
## List of files / directories to incldue / exclude from sync on both sides (see rsync patterns, wildcards work). ## List of files / directories to incldue / exclude from sync on both sides (see rsync patterns, wildcards work).
## Paths are relative to sync dirs. List elements are separated by a semicolon. ## Paths are relative to sync dirs. List elements are separated by a semicolon. Specifying "cache" will remove every found cache subdirectory.
RSYNC_INCLUDE_PATTERN="" RSYNC_INCLUDE_PATTERN=""
RSYNC_EXCLUDE_PATTERN="" RSYNC_EXCLUDE_PATTERN=""
#RSYNC_EXCLUDE_PATTERN="tmp;archives" #RSYNC_EXCLUDE_PATTERN="tmp;archives;cache"
## Files that contains lists of files / directories to include / exclude from sync on both sides. Leave this empty if you don't want to use an exclusion file. ## Files that contains lists of files / directories to include / exclude from sync on both sides. Leave this empty if you don't want to use an exclusion file.
## This file has to be in the same directory as the config file ## This file has to be in the same directory as the config file
@ -149,43 +155,44 @@ PATH_SEPARATOR_CHAR=";"
## Optional arguments passed to rsync executable. The following are already managed by the program and shoul never be passed here ## Optional arguments passed to rsync executable. The following are already managed by the program and shoul never be passed here
## -rltD -n -P -o -g --executability -A -X -zz -L -K -H -8 -u -i --stats --checksum --bwlimit --partial --partial-dir --exclude --exclude-from --include--from --no-whole-file --whole-file --list-only ## -rltD -n -P -o -g --executability -A -X -zz -L -K -H -8 -u -i --stats --checksum --bwlimit --partial --partial-dir --exclude --exclude-from --include--from --no-whole-file --whole-file --list-only
## When dealing with different filesystems for sync, or using SMB mountpoints, try adding --modify-window=2 --omit-dir-times as optional arguments
RSYNC_OPTIONAL_ARGS="" RSYNC_OPTIONAL_ARGS=""
## Preserve basic linux permissions ## Preserve basic linux permissions
PRESERVE_PERMISSIONS=yes PRESERVE_PERMISSIONS=true
PRESERVE_OWNER=yes PRESERVE_OWNER=true
PRESERVE_GROUP=yes PRESERVE_GROUP=true
## On MACOS X, does not work and will be ignored ## On MACOS X, does not work and will be ignored
PRESERVE_EXECUTABILITY=yes PRESERVE_EXECUTABILITY=true
## Preserve ACLS. Make sure source and target FS can hold same ACLs or you'll get loads of errors. ## Preserve ACLS. Make sure source and target FS can hold same ACLs or you'll get loads of errors.
PRESERVE_ACL=no PRESERVE_ACL=false
## Preserve Xattr. MAke sure source and target FS can hold same Xattr or you'll get loads of errors. ## Preserve Xattr. MAke sure source and target FS can hold same Xattr or you'll get loads of errors.
PRESERVE_XATTR=no PRESERVE_XATTR=false
## Transforms symlinks into referent files/dirs ## Transforms symlinks into referent files/dirs
COPY_SYMLINKS=yes COPY_SYMLINKS=true
## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root. ## Treat symlinked dirs as dirs. CAUTION: This also follows symlinks outside of the replica root.
KEEP_DIRLINKS=yes KEEP_DIRLINKS=true
## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them. ## Preserve hard links. Make sure source and target FS can manage hard links or you will lose them.
PRESERVE_HARDLINKS=no PRESERVE_HARDLINKS=false
## Let RSYNC compress file transfers. Do not use this on local-local backup schemes. Also, this is not useful if SSH compression is enabled. ## Let RSYNC compress file transfers. Do not use this on local-local backup schemes. Also, this is not useful if SSH compression is enabled.
RSYNC_COMPRESS=no RSYNC_COMPRESS=false
## Max execution time per file backup task. Soft is warning only. Hard is warning, stopping backup and processing next one one file list. Tilme is specified in seconds ## Max execution time per file backup task. Soft is warning only. Hard is warning, stopping backup and processing next one one file list. Tilme is specified in seconds
SOFT_MAX_EXEC_TIME_FILE_TASK=3600 SOFT_MAX_EXEC_TIME_FILE_TASK=3600
HARD_MAX_EXEC_TIME_FILE_TASK=7200 HARD_MAX_EXEC_TIME_FILE_TASK=7200
## Keep partial uploads that can be resumed on next run, experimental feature ## Keep partial uploads that can be resumed on next run, experimental feature
PARTIAL=no PARTIAL=false
## Delete files on destination that vanished from source. Do not turn this on unless you enabled backup rotation or a snapshotting FS like zfs to keep those vanished files on the destination. ## Delete files on destination that vanished from source. Do not turn this on unless you enabled backup rotation or a snapshotting FS like zfs to keep those vanished files on the destination.
DELETE_VANISHED_FILES=no DELETE_VANISHED_FILES=false
## Use delta copy algortithm (usefull when local paths are network drives), defaults to yes ## Use delta copy algortithm (usefull when local paths are network drives), defaults to true
DELTA_COPIES=yes DELTA_COPIES=true
## Bandwidth limit Kbytes / second for file backups. Leave 0 to disable limitation. ## Bandwidth limit Kbytes / second for file backups. Leave 0 to disable limitation.
BANDWIDTH=0 BANDWIDTH=0
@ -193,11 +200,16 @@ BANDWIDTH=0
## Paranoia option. Don't change this unless you read the documentation. ## Paranoia option. Don't change this unless you read the documentation.
RSYNC_EXECUTABLE=rsync RSYNC_EXECUTABLE=rsync
###### ALERT OPTIONS [ALERT_OPTIONS]
## Alert email addresses separated by a space character ## Alert email addresses separated by a space character
DESTINATION_MAILS="your@mail.address" DESTINATION_MAILS="your@mail.address"
## Optional change of mail body encoding (using iconv)
## By default, all mails are sent in UTF-8 format without header (because of maximum compatibility of all platforms)
## You may specify an optional encoding here (like "ISO-8859-1" or whatever iconv can handle)
MAIL_BODY_CHARSET=""
## Environment specific mail options (used with busybox sendemail, mailsend.exe from muquit, http://github.com/muquit/mailsend or sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail) ## Environment specific mail options (used with busybox sendemail, mailsend.exe from muquit, http://github.com/muquit/mailsend or sendemail.exe from Brandon Zehm, http://caspian.dotconf.net/menu/Software/SendEmail)
SENDER_MAIL="alert@your.system.tld" SENDER_MAIL="alert@your.system.tld"
SMTP_SERVER=smtp.your.isp.tld SMTP_SERVER=smtp.your.isp.tld
@ -207,7 +219,7 @@ SMTP_ENCRYPTION=none
SMTP_USER= SMTP_USER=
SMTP_PASSWORD= SMTP_PASSWORD=
###### GENERAL BACKUP OPTIONS [BACKUP SETTINGS]
## Max execution time of whole backup process. Soft max exec time generates a warning only. ## Max execution time of whole backup process. Soft max exec time generates a warning only.
## Hard max exec time generates a warning and stops the whole backup execution. ## Hard max exec time generates a warning and stops the whole backup execution.
@ -218,12 +230,12 @@ HARD_MAX_EXEC_TIME_TOTAL=36000
KEEP_LOGGING=1801 KEEP_LOGGING=1801
## Backup Rotation. You may rotate backups if you don't use snapshots on your backup server. ## Backup Rotation. You may rotate backups if you don't use snapshots on your backup server.
ROTATE_SQL_BACKUPS=no ROTATE_SQL_BACKUPS=false
ROTATE_SQL_COPIES=7 ROTATE_SQL_COPIES=7
ROTATE_FILE_BACKUPS=no ROTATE_FILE_BACKUPS=false
ROTATE_FILE_COPIES=7 ROTATE_FILE_COPIES=7
###### EXECUTION HOOKS [EXECUTION_HOOKS]
## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set). ## Commands can will be run before and / or after backup execution (remote execution will only happen if REMOTE_BACKUP is set).
## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data. ## This is useful to make a snapshot before backing up data, or even handle snapshots of backed up data.
@ -238,7 +250,7 @@ MAX_EXEC_TIME_PER_CMD_BEFORE=0
MAX_EXEC_TIME_PER_CMD_AFTER=0 MAX_EXEC_TIME_PER_CMD_AFTER=0
## Stops whole backup execution if one of the above commands fail ## Stops whole backup execution if one of the above commands fail
STOP_ON_CMD_ERROR=no STOP_ON_CMD_ERROR=false
## Run local and remote after backup cmd's even on failure ## Run local and remote after backup cmd's even on failure
RUN_AFTER_CMD_ON_ERROR=no RUN_AFTER_CMD_ON_ERROR=false

2875
install.sh

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

2156
obackup.sh

File diff suppressed because it is too large Load Diff

View File

@ -5,102 +5,49 @@
##### It will filter the commands that can be run remotely via ssh. ##### It will filter the commands that can be run remotely via ssh.
##### Please chmod 755 and chown root:root this file ##### Please chmod 755 and chown root:root this file
##### Obackup needed commands: rsync find du mysql mysqldump (sudo) ##### Any command that has env _REMOTE_TOKEN= with the corresponding token in it will be run
##### Osync needed commands: rsync find du echo mkdir rm if df (sudo) ##### Any other command will return a "syntax error"
SCRIPT_BUILD=2016031401 ##### For details, see ssh_filter.log
## If enabled, execution of "sudo" command will be allowed. SCRIPT_BUILD=2017020802
## Allow sudo
SUDO_EXEC=yes SUDO_EXEC=yes
## Paranoia option. Don't change this unless you read the documentation and still feel concerned about security issues.
RSYNC_EXECUTABLE=rsync
## Enable other commands, useful for remote execution hooks like remotely creating snapshots.
CMD1=""
CMD2=""
CMD3=""
LOG_FILE=~/.ssh/ssh_filter.log ## Log all valid commands too
_DEBUG=no
## Set remote token in authorized_keys
if [ "$1" != "" ]; then
_REMOTE_TOKEN="${1}"
fi
LOG_FILE="${HOME}/.ssh/ssh_filter.log"
function Log { function Log {
DATE=$(date) DATE=$(date)
echo "$DATE - $1" >> $LOG_FILE echo "$DATE - $1" >> "$LOG_FILE"
} }
function Go { function Go {
if [ "$_DEBUG" == "yes" ]; then
Log "Executing [$SSH_ORIGINAL_COMMAND]."
fi
eval "$SSH_ORIGINAL_COMMAND" eval "$SSH_ORIGINAL_COMMAND"
} }
case ${SSH_ORIGINAL_COMMAND%% *} in case "${SSH_ORIGINAL_COMMAND}" in
"$RSYNC_EXECUTABLE") *"env _REMOTE_TOKEN=$_REMOTE_TOKEN"*)
Go ;; if [ "$SUDO_EXEC" != "yes" ] && [[ $SSH_ORIGINAL_COMMAND == *"sudo "* ]]; then
"echo") Log "Command [$SSH_ORIGINAL_COMMAND] contains sudo which is not allowed."
Go ;; echo "Syntax error unexpected end of file"
"find")
Go ;;
"du")
Go ;;
"mkdir")
Go ;;
"rm")
Go ;;
"df")
Go ;;
"mv")
Go ;;
"$CMD1")
if [ "$CMD1" != "" ]; then
Go
fi
;;
"$CMD2")
if [ "$CMD2" != "" ]; then
Go
fi
;;
"$CMD3")
if [ "$CMD3" != "" ]; then
Go
fi
;;
"sudo")
if [ "$SUDO_EXEC" == "yes" ]; then
if [[ "$SSH_ORIGINAL_COMMAND" == "sudo $RSYNC_EXECUTABLE"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo du"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo find"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo mkdir"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo rm"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo echo"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo df"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo mv"* ]]; then
Go
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo $CMD1"* ]]; then
if [ "$CMD1" != "" ]; then
Go
fi
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo $CMD2"* ]]; then
if [ "$CMD2" != "" ]; then
Go
fi
elif [[ "$SSH_ORIGINAL_COMMAND" == "sudo $CMD3"* ]]; then
if [ "$CMD3" != "" ]; then
Go
fi
else
Log "Command [$SSH_ORIGINAL_COMMAND] not allowed."
exit 1
fi
else
Log "Command [$SSH_ORIGINAL_COMMAND] not allowed. sudo not enabled."
exit 1 exit 1
fi fi
Go
;; ;;
*) *)
Log "Command [$SSH_ORIGINAL_COMMAND] not allowed." Log "Command [$SSH_ORIGINAL_COMMAND] not allowed."
echo "Syntax error near unexpected token"
exit 1 exit 1
;;
esac esac

View File

@ -6,8 +6,8 @@ AUTHOR="(C) 2016 by Orsiris de Jong"
CONTACT="http://www.netpower.fr/obacup - ozy@netpower.fr" CONTACT="http://www.netpower.fr/obacup - ozy@netpower.fr"
OLD_PROGRAM_VERSION="v1.x" OLD_PROGRAM_VERSION="v1.x"
NEW_PROGRAM_VERSION="v2.1x" NEW_PROGRAM_VERSION="v2.1x"
CONFIG_FILE_VERSION=2017010201 CONFIG_FILE_REVISION=2.1
PROGRAM_BUILD=2016113001 PROGRAM_BUILD=2019102101
if ! type "$BASH" > /dev/null; then if ! type "$BASH" > /dev/null; then
echo "Please run this script only with bash shell. Tested on bash >= 3.2" echo "Please run this script only with bash shell. Tested on bash >= 3.2"
@ -39,6 +39,7 @@ FILE_WARN_MIN_SPACE
REMOTE_SYSTEM_URI REMOTE_SYSTEM_URI
SSH_RSA_PRIVATE_KEY SSH_RSA_PRIVATE_KEY
SSH_PASSWORD_FILE SSH_PASSWORD_FILE
_REMOTE_TOKEN
SSH_COMPRESSION SSH_COMPRESSION
SSH_IGNORE_KNOWN_HOSTS SSH_IGNORE_KNOWN_HOSTS
RSYNC_REMOTE_PATH RSYNC_REMOTE_PATH
@ -81,6 +82,7 @@ DELTA_COPIES
BANDWIDTH BANDWIDTH
RSYNC_EXECUTABLE RSYNC_EXECUTABLE
DESTINATION_MAILS DESTINATION_MAILS
MAIL_BODY_CHARSET
SENDER_MAIL SENDER_MAIL
SMTP_SERVER SMTP_SERVER
SMTP_PORT SMTP_PORT
@ -107,41 +109,42 @@ RUN_AFTER_CMD_ON_ERROR
VALUES=( VALUES=(
test-backup test-backup
'' ''
yes true
yes true
local local
/home/storage/sql /home/storage/sql
/home/storage/files /home/storage/files
no false
/home/storage/crypt /home/storage/crypt
'Your Name used with GPG signature' 'Your Name used with GPG signature'
'' ''
yes true
yes true
1024 1024
yes true
1048576 1048576
1048576 1048576
ssh://backupuser@remote.system.tld:22/ ssh://backupuser@remote.system.tld:22/
${HOME}/.ssh/id_rsa ${HOME}/.ssh/id_rsa
'' ''
yes SomeAlphaNumericToken9
no true
false
'' ''
yes true
'www.kernel.org www.google.com' 'www.kernel.org www.google.com'
no false
root root
yes true
test test
'' ''
3600 3600
7200 7200
'--opt --single-transaction' '--opt --single-transaction'
3 3
/some/path ''
/home ''
/home/backupuser\;/host/lost+found '/lost+found;/tmp'
include include
'' ''
'' ''
@ -149,24 +152,25 @@ include
'' ''
\; \;
'' ''
yes true
yes true
yes true
yes true
no false
no false
yes true
yes true
no false
no false
3600 3600
7200 7200
no false
no false
yes true
0 0
rsync rsync
infrastructure@example.com infrastructure@example.com
''
sender@example.com sender@example.com
smtp.isp.tld smtp.isp.tld
25 25
@ -176,9 +180,9 @@ none
30000 30000
36000 36000
1801 1801
no false
7 7
no false
7 7
'' ''
'' ''
@ -186,8 +190,8 @@ no
'' ''
0 0
0 0
no false
no false
) )
function Usage { function Usage {
@ -218,7 +222,7 @@ function LoadConfigFile {
fi fi
} }
function RewriteOldConfigFiles { function CheckAndBackup {
local config_file="${1}" local config_file="${1}"
if ! grep "BACKUP_ID=" $config_file > /dev/null && ! grep "INSTANCE_ID=" $config_file > /dev/null; then if ! grep "BACKUP_ID=" $config_file > /dev/null && ! grep "INSTANCE_ID=" $config_file > /dev/null; then
@ -232,6 +236,10 @@ function RewriteOldConfigFiles {
echo "Cannot backup config file." echo "Cannot backup config file."
exit 1 exit 1
fi fi
}
function RewriteOldConfigFiles {
local config_file="${1}"
echo "Rewriting config file $config_file" echo "Rewriting config file $config_file"
@ -241,13 +249,13 @@ function RewriteOldConfigFiles {
sed -i'.tmp' 's/^LOCAL_SQL_STORAGE=/SQL_STORAGE=/g' "$config_file" sed -i'.tmp' 's/^LOCAL_SQL_STORAGE=/SQL_STORAGE=/g' "$config_file"
sed -i'.tmp' 's/^LOCAL_FILE_STORAGE=/FILE_STORAGE=/g' "$config_file" sed -i'.tmp' 's/^LOCAL_FILE_STORAGE=/FILE_STORAGE=/g' "$config_file"
sed -i'.tmp' 's/^DISABLE_GET_BACKUP_FILE_SIZE=no/GET_BACKUP_SIZE=yes/g' "$config_file" sed -i'.tmp' 's/^DISABLE_GET_BACKUP_FILE_SIZE=no/GET_BACKUP_SIZE=true/g' "$config_file"
sed -i'.tmp' 's/^DISABLE_GET_BACKUP_FILE_SIZE=yes/GET_BACKUP_SIZE=no/g' "$config_file" sed -i'.tmp' 's/^DISABLE_GET_BACKUP_FILE_SIZE=yes/GET_BACKUP_SIZE=false/g' "$config_file"
sed -i'.tmp' 's/^LOCAL_STORAGE_KEEP_ABSOLUTE_PATHS=/KEEP_ABSOLUTE_PATHS=/g' "$config_file" sed -i'.tmp' 's/^LOCAL_STORAGE_KEEP_ABSOLUTE_PATHS=/KEEP_ABSOLUTE_PATHS=/g' "$config_file"
sed -i'.tmp' 's/^LOCAL_STORAGE_WARN_MIN_SPACE=/SQL_WARN_MIN_SPACE=/g' "$config_file" sed -i'.tmp' 's/^LOCAL_STORAGE_WARN_MIN_SPACE=/SQL_WARN_MIN_SPACE=/g' "$config_file"
if ! grep "^FILE_WARN_MIN_SPACE=" "$config_file" > /dev/null; then if ! grep "^FILE_WARN_MIN_SPACE=" "$config_file" > /dev/null; then
VALUE=$(cat $config_file | grep "SQL_WARN_MIN_SPACE=") VALUE=$(grep "SQL_WARN_MIN_SPACE=" "$config_file")
VALUE=${VALUE#*=} VALUE="${VALUE#*=}"
sed -i'.tmp' '/^SQL_WARN_MIN_SPACE=*/a\'$'\n''FILE_WARN_MIN_SPACE='$VALUE'\'$'\n''' "$config_file" sed -i'.tmp' '/^SQL_WARN_MIN_SPACE=*/a\'$'\n''FILE_WARN_MIN_SPACE='$VALUE'\'$'\n''' "$config_file"
fi fi
sed -i'.tmp' 's/^DIRECTORIES_SIMPLE_LIST=/DIRECTORY_LIST=/g' "$config_file" sed -i'.tmp' 's/^DIRECTORIES_SIMPLE_LIST=/DIRECTORY_LIST=/g' "$config_file"
@ -255,28 +263,29 @@ function RewriteOldConfigFiles {
sed -i'.tmp' 's/^DIRECTORIES_RECURSE_EXCLUDE_LIST=/RECURSIVE_EXCLUDE_LIST=/g' "$config_file" sed -i'.tmp' 's/^DIRECTORIES_RECURSE_EXCLUDE_LIST=/RECURSIVE_EXCLUDE_LIST=/g' "$config_file"
sed -i'.tmp' 's/^ROTATE_BACKUPS=/ROTATE_SQL_BACKUPS=/g' "$config_file" sed -i'.tmp' 's/^ROTATE_BACKUPS=/ROTATE_SQL_BACKUPS=/g' "$config_file"
if ! grep "^ROTATE_FILE_BACKUPS=" "$config_file" > /dev/null; then if ! grep "^ROTATE_FILE_BACKUPS=" "$config_file" > /dev/null; then
VALUE=$(cat $config_file | grep "ROTATE_SQL_BACKUPS=") VALUE=$(grep "ROTATE_SQL_BACKUPS=" "$config_file")
VALUE=${VALUE#*=} VALUE="${VALUE#*=}"
sed -i'.tmp' '/^ROTATE_SQL_BACKUPS=*/a\'$'\n''ROTATE_FILE_BACKUPS='$VALUE'\'$'\n''' "$config_file" sed -i'.tmp' '/^ROTATE_SQL_BACKUPS=*/a\'$'\n''ROTATE_FILE_BACKUPS='$VALUE'\'$'\n''' "$config_file"
fi fi
sed -i'.tmp' 's/^ROTATE_COPIES=/ROTATE_SQL_COPIES=/g' "$config_file" sed -i'.tmp' 's/^ROTATE_COPIES=/ROTATE_SQL_COPIES=/g' "$config_file"
if ! grep "^ROTATE_FILE_COPIES=" "$config_file" > /dev/null; then if ! grep "^ROTATE_FILE_COPIES=" "$config_file" > /dev/null; then
VALUE=$(cat $config_file | grep "ROTATE_SQL_COPIES=") VALUE=$(grep "ROTATE_SQL_COPIES=" "$config_file")
VALUE=${VALUE#*=} VALUE="${VALUE#*=}"
sed -i'.tmp' '/^ROTATE_SQL_COPIES=*/a\'$'\n''ROTATE_FILE_COPIES='$VALUE'\'$'\n''' "$config_file" sed -i'.tmp' '/^ROTATE_SQL_COPIES=*/a\'$'\n''ROTATE_FILE_COPIES='$VALUE'\'$'\n''' "$config_file"
fi fi
REMOTE_BACKUP=$(cat $config_file | grep "REMOTE_BACKUP=") REMOTE_BACKUP=$(grep "REMOTE_BACKUP=" "$config_file")
REMOTE_BACKUP=${REMOTE_BACKUP#*=} REMOTE_BACKUP="${REMOTE_BACKUP#*=}"
if [ "$REMOTE_BACKUP" == "yes" ]; then if [ "$REMOTE_BACKUP" == "yes" ]; then
REMOTE_USER=$(cat $config_file | grep "REMOTE_USER=") REMOTE_USER=$(grep "REMOTE_USER=" "$config_file")
REMOTE_USER=${REMOTE_USER#*=} REMOTE_USER="${REMOTE_USER#*=}"
REMOTE_HOST=$(cat $config_file | grep "REMOTE_HOST=") REMOTE_HOST=$(grep "REMOTE_HOST=" "$config_file")
REMOTE_HOST=${REMOTE_HOST#*=} REMOTE_HOST="${REMOTE_HOST#*=}"
REMOTE_PORT=$(cat $config_file | grep "REMOTE_PORT=") REMOTE_PORT=$(grep "REMOTE_PORT=" "$config_file")
REMOTE_PORT=${REMOTE_PORT#*=} REMOTE_PORT="${REMOTE_PORT#*=}"
REMOTE_SYSTEM_URI="ssh://$REMOTE_USER@$REMOTE_HOST:$REMOTE_PORT/" REMOTE_SYSTEM_URI="ssh://$REMOTE_USER@$REMOTE_HOST:$REMOTE_PORT/"
sed -i'.tmp' 's#^REMOTE_BACKUP=true#REMOTE_SYSTEM_URI='$REMOTE_SYSTEM_URI'#g' "$config_file"
sed -i'.tmp' 's#^REMOTE_BACKUP=yes#REMOTE_SYSTEM_URI='$REMOTE_SYSTEM_URI'#g' "$config_file" sed -i'.tmp' 's#^REMOTE_BACKUP=yes#REMOTE_SYSTEM_URI='$REMOTE_SYSTEM_URI'#g' "$config_file"
sed -i'.tmp' '/^REMOTE_USER==*/d' "$config_file" sed -i'.tmp' '/^REMOTE_USER==*/d' "$config_file"
sed -i'.tmp' '/^REMOTE_HOST==*/d' "$config_file" sed -i'.tmp' '/^REMOTE_HOST==*/d' "$config_file"
@ -299,27 +308,76 @@ function AddMissingConfigOptions {
if ! grep "^${KEYWORDS[$counter]}=" > /dev/null "$config_file"; then if ! grep "^${KEYWORDS[$counter]}=" > /dev/null "$config_file"; then
echo "${KEYWORDS[$counter]} not found" echo "${KEYWORDS[$counter]} not found"
if [ $counter -gt 0 ]; then if [ $counter -gt 0 ]; then
if [ "${VALUES[$counter]}" == true ] || [ "${VALUES[$counter]}" == false ]; then
sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'='"${VALUES[$counter]}"'\'$'\n''' "$config_file"
else
sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file" sed -i'.tmp' '/^'${KEYWORDS[$((counter-1))]}'=*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
fi
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
echo "Cannot add missing ${[KEYWORDS[$counter]}." echo "Cannot add missing ${[KEYWORDS[$counter]}."
exit 1 exit 1
fi fi
else else
sed -i'.tmp' '/onfig file rev*/a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file" if [ "${VALUES[$counter]}" == true ] || [ "${VALUES[$counter]}" == false ]; then
sed -i'.tmp' '/[GENERAL\]$//a\'$'\n'${KEYWORDS[$counter]}'='"${VALUES[$counter]}"'\'$'\n''' "$config_file"
else
sed -i'.tmp' '/[GENERAL\]$//a\'$'\n'${KEYWORDS[$counter]}'="'"${VALUES[$counter]}"'"\'$'\n''' "$config_file"
fi
if [ $? -ne 0 ]; then
echo "Cannot add missing ${[KEYWORDS[$counter]}."
exit 1
fi
fi fi
echo "Added missing ${KEYWORDS[$counter]} config option with default option [${VALUES[$counter]}]" echo "Added missing ${KEYWORDS[$counter]} config option with default option [${VALUES[$counter]}]"
else
# Not the most elegant but the quickest way :)
if grep "^${KEYWORDS[$counter]}=yes$" > /dev/null "$config_file"; then
sed -i'.tmp' 's/^'${KEYWORDS[$counter]}'=.*/'${KEYWORDS[$counter]}'=true/g' "$config_file"
if [ $? -ne 0 ]; then
echo "Cannot rewrite ${[KEYWORDS[$counter]} boolean to true."
exit 1
fi
elif grep "^${KEYWORDS[$counter]}=no$" > /dev/null "$config_file"; then
sed -i'.tmp' 's/^'${KEYWORDS[$counter]}'=.*/'${KEYWORDS[$counter]}'=false/g' "$config_file"
if [ $? -ne 0 ]; then
echo "Cannot rewrite ${[KEYWORDS[$counter]} boolean to false."
exit 1
fi
fi
fi fi
counter=$((counter+1)) counter=$((counter+1))
done done
} }
function RewriteSections {
local config_file="${1}"
# Earlier config files has GENERAL BACKUP OPTIONS set twice. Let's replace the first one only. sed does a horrible job doing this, at least if portability is required
awk '/###### GENERAL BACKUP OPTIONS/ && !done { gsub(/###### GENERAL BACKUP OPTIONS/, "[GENERAL]"); done=1}; 1' "$config_file" >> "$config_file.tmp"
#sed -i'.tmp' 's/###### GENERAL BACKUP OPTIONS/[GENERAL]/' "$config_file"
# Fix using earlier tmp file from awk
sed 's/###### BACKUP STORAGE/[BACKUP STORAGE]/g' "$config_file.tmp" > "$config_file"
sed -i'.tmp' 's/###### REMOTE ONLY OPTIONS/[REMOTE_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/###### DATABASE SPECIFIC OPTIONS/[DATABASE BACKUP SETTINGS]/g' "$config_file"
sed -i'.tmp' 's/###### FILES SPECIFIC OPTIONS/[FILE BACKUP SETTINGS]/g' "$config_file"
sed -i'.tmp' 's/###### ALERT OPTIONS/[ALERT_OPTIONS]/g' "$config_file"
sed -i'.tmp' 's/###### GENERAL BACKUP OPTIONS/[BACKUP SETTINGS]/g' "$config_file"
sed -i'.tmp' 's/###### EXECUTION HOOKS/[EXECUTION_HOOKS]/g' "$config_file"
}
function UpdateConfigHeader { function UpdateConfigHeader {
local config_file="${1}" local config_file="${1}"
if ! grep "^CONFIG_FILE_REVISION=" > /dev/null "$config_file"; then
if grep "\[GENERAL\]" > /dev/null "$config_file"; then
sed -i'.tmp' '/^\[GENERAL\]$/a\'$'\n'CONFIG_FILE_REVISION=$CONFIG_FILE_REVISION$'\n''' "$config_file"
else
sed -i'.tmp' '/.*onfig file rev.*/a\'$'\n'CONFIG_FILE_REVISION=$CONFIG_FILE_REVISION$'\n''' "$config_file"
fi
# "onfig file rev" to deal with earlier variants of the file # "onfig file rev" to deal with earlier variants of the file
sed -i'.tmp' 's/.*onfig file rev.*/##### '$SUBPROGRAM' config file rev '$CONFIG_FILE_VERSION' '$NEW_PROGRAM_VERSION'/' "$config_file" #sed -i'.tmp' 's/.*onfig file rev.*//' "$config_file"
rm -f "$config_file.tmp" fi
} }
if [ "$1" != "" ] && [ -f "$1" ] && [ -w "$1" ]; then if [ "$1" != "" ] && [ -f "$1" ] && [ -w "$1" ]; then
@ -327,9 +385,13 @@ if [ "$1" != "" ] && [ -f "$1" ] && [ -w "$1" ]; then
# Make sure there is no ending slash # Make sure there is no ending slash
CONF_FILE="${CONF_FILE%/}" CONF_FILE="${CONF_FILE%/}"
LoadConfigFile "$CONF_FILE" LoadConfigFile "$CONF_FILE"
CheckAndBackup "$CONF_FILE"
RewriteSections "$CONF_FILE"
RewriteOldConfigFiles "$CONF_FILE" RewriteOldConfigFiles "$CONF_FILE"
AddMissingConfigOptions "$CONF_FILE" AddMissingConfigOptions "$CONF_FILE"
UpdateConfigHeader "$CONF_FILE" UpdateConfigHeader "$CONF_FILE"
rm -f "$CONF_FILE.tmp"
echo "Configuration file upgrade finished"
else else
Usage Usage
fi fi