big cleanup

willip 2019-03-08 21:11:20 +00:00
parent 1006521775
commit 654a618be9
4 changed files with 39 additions and 78 deletions

@ -4,8 +4,11 @@
- [x] how to make a daemon?
- [x] get argparse working
- [x] implement config file
- [ ] model default system
- [ ] list / add / edit / delete operating systems
- [x] model default system
- [x] list / add / edit / delete operating systems
- [ ] os post processing (i.e. checksum and extraction)
- [ ] flash storage with downloaded os
- [ ] make cli options work
- [ ] model -> storage -> bool overwrite_always false
# cli
@ -22,21 +25,3 @@ positional arguments:
optional arguments:
-h, --help show this help message and exit
```
# model
## operating_system
- string file_name
- string file_url
- string test_name
- string test_url
- string test_sha256
- int interval [ days ], default = 7 days
## storage
- id operating_system
- string label
- string uuid
[- bool overwrite_always false]
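To make the model concrete, here is a sketch of the two record types as plain Python dicts. The raspbian URLs and file names are taken from distributions.json below; the `raspbian` id and the storage values are placeholders, not taken from the repository:

```python
# sketch of the model; values marked as placeholders are illustrative only
operating_system = {
    'file_name': 'raspbian_latest.zip',        # placeholder local file name
    'file_url': 'https://downloads.raspberrypi.org/raspbian_latest',
    'test_name': 'raspbian_release-notes.txt',
    'test_url': 'http://downloads.raspberrypi.org/raspbian/release_notes.txt',
    'test_sha256': '',                         # filled in after the first check
    'interval': 7,                             # days, default per the model above
}

storage = {
    'operating_system': 'raspbian',            # placeholder id referencing the entry above
    'label': 'SDCARD',                         # placeholder
    'uuid': '0000-0000',                       # placeholder
    'overwrite_always': False,
}
```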

@ -5,6 +5,5 @@
"file_url":"https://downloads.raspberrypi.org/raspbian_latest",
"test_name":"raspbian_release-notes.txt",
"test_url":"http://downloads.raspberrypi.org/raspbian/release_notes.txt",
"last_check":""
}
}
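The removed `last_check` field appears to be superseded by the rewritten check_update() below, which derives the age of an image from the downloaded file's modification time rather than from a stored timestamp; in essence:

```python
from datetime import datetime
from os import path

# days since the local image file was last written; replaces a stored last_check value
file_age = (datetime.now() - datetime.fromtimestamp(path.getmtime(file_name))).days
```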

pyf.py
@ -8,17 +8,16 @@ def log(message, log_file):
def prt(message):
''' print a message with style '''
''' print @message with style '''
print('::: {}'.format(message))
def download(url, file_name, progress=True):
#TODO check if download was successful, if not do stuff
''' download @url to @file_name '''
from urllib.request import urlretrieve
if progress:
from tqdm import tqdm
prt('downloading {}'.format(url))
class TqdmUpTo(tqdm):
def update_to(self, b=1, bsize=1, tsize=None):
if tsize is not None:
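The hunk is cut off inside TqdmUpTo, but the class matches tqdm's documented reporthook recipe for urlretrieve. A self-contained sketch of how download() presumably completes (not the exact code from this commit):

```python
from urllib.request import urlretrieve
from tqdm import tqdm

class TqdmUpTo(tqdm):
    ''' tqdm subclass whose update_to() can serve as a urlretrieve reporthook '''
    def update_to(self, b=1, bsize=1, tsize=None):
        # b: blocks transferred so far, bsize: size of each block, tsize: total size
        if tsize is not None:
            self.total = tsize
        self.update(b * bsize - self.n)

def download(url, file_name, progress=True):
    ''' download @url to @file_name, with an optional progress bar '''
    if progress:
        with TqdmUpTo(unit='B', unit_scale=True, miniters=1) as t:
            urlretrieve(url, filename=file_name, reporthook=t.update_to)
    else:
        urlretrieve(url, filename=file_name)
```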

@ -28,94 +28,72 @@ def init_parser():
return parser.parse_args()
# c: is there a *testfile_sha256* sum?
# -> no: download image file
# -> yes: goto d
#
# d: download testfile
# is sha256 sum of testfile and *testfile_sha256* equivalent?
# -> no: download image file
# -> yes: break
def check_update(database):
''' check if update is necessary '''
from urllib.parse import urlparse
def check_update(distributions):
''' check if distributions are up to date '''
from os import path
from datetime import datetime
for name, dist in database.items():
prt('checking operating system {} ...'.format(name))
if dist['file_url'] == '':
prt('database.txt: `file_url` not specified, skipping ...')
continue
#TODO check if download was successful, if not do stuff
if dist['file_name'] == '':
prt('database.txt: `file_name` not specified')
#TODO get real file_name and save it to database.txt
file_name = urlparse(dist['file_url'])
file_name = file_name.path
file_name = '{}/dist/{}'.format(WORKDIR, path.basename(file_name))
download(dist['file_url'], file_name)
# go through distributions
for name, dist in distributions.items():
prt('checking operating system {} for updates ...'.format(name))
if not dist['file_url'] or not dist['interval']:
prt('ERROR in distributions.json: *file_url* and *interval* \
must be specified, skipping {} ...'.format(name))
continue
# get absolute file name
if not dist['file_name']:
dist['file_name'] = dist['file_url'].split('/')[-1]
file_name = '{}/dist/{}'.format(WORKDIR, dist['file_name'])
if not path.isfile(file_name):
prt('database.txt: `file_name` {} does not exist on your filesystem'.format(file_name))
download(dist['file_url'], file_name)
continue
if dist['interval'] == '':
prt('database.txt: `interval` not specified')
prt('{} does not yet exist, downloading ...'.format(file_name))
download(dist['file_url'], file_name)
continue
# is the file younger than interval?
file_mod = datetime.fromtimestamp(path.getmtime(file_name))
file_age = (datetime.now() - file_mod).days
if file_age < int(dist['interval']):
prt('{} is younger than {} days, skipping ...'.format(
name, dist['interval']))
continue
else:
prt('{} is older than {} days'.format(name, file_age))
prt('{} is older than {} days, checking test file ...'.format(name, file_age))
if dist['test_url'] == '':
prt('database.txt: `test_url` not specified')
if not dist['test_url']:
prt('distributions.json: `test_url` not specified, downloading {} ...'.format(dist['file_url']))
download(dist['file_url'], file_name)
continue
if dist['test_name'] == '':
prt('database.txt: `test_name` not specified')
#TODO write test_name to database.txt
test_name = urlparse(dist['test_url'])
test_name = test_name.path
test_name = '{}/dist/{}'.format(WORKDIR, path.basename(test_name))
else:
test_name = '{}/dist/{}'.format(WORKDIR, dist['test_name'])
# get absolute test name
if not dist['test_name']:
dist['test_name'] = dist['test_url'].split('/')[-1]
test_name = '{}/dist/{}'.format(WORKDIR, dist['test_name'])
if not path.isfile(test_name):
prt('database.txt: `test_name` {} does not exist on your filesystem'.format(test_name))
prt('{} does not yet exist, downloading ...'.format(dist['test_url']))
download(dist['test_url'], test_name)
download(dist['file_url'], file_name)
continue
prt(test_name)
# did sha256 of test files change?
test_sha256_old = sha256(test_name)
download(dist['test_url'], test_name)
test_sha256_new = sha256(test_name)
if not test_sha256_old == test_sha256_new:
prt('`test_file` {} has changed'.format(test_name))
prt('`test_file` {} has changed, downloading {}'.format(test_name, dist['test_url']))
download(dist['file_url'], file_name)
continue
prt('{} is still up to date, skipping ...'.format(name))
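check_update() relies on a sha256() helper that is not part of this diff; assuming it takes a path and returns a hex digest, a typical implementation would be:

```python
import hashlib

def sha256(file_name):
    ''' @return the hex sha256 digest of @file_name, read in chunks '''
    digest = hashlib.sha256()
    with open(file_name, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()
```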
def read_os_database():
''' read the os database.txt file
@return a json object of the database
'''
with open('{}/database.txt'.format(WORKDIR), 'r') as f:
def get_distributions():
''' read distributions.json and @return it as json object '''
with open('{}/distributions.json'.format(WORKDIR), 'r') as f:
#TODO catch json format errors and stuff like that
return json.load(f)
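The #TODO above asks for catching JSON format errors; one way to do that without changing the function's interface (a sketch using the module's existing json, prt and WORKDIR, not part of the commit) would be:

```python
import sys

def get_distributions():
    ''' read distributions.json and @return it as a dict, or exit with a clear message '''
    try:
        with open('{}/distributions.json'.format(WORKDIR), 'r') as f:
            return json.load(f)
    except FileNotFoundError:
        prt('ERROR: {}/distributions.json does not exist'.format(WORKDIR))
        sys.exit(1)
    except json.JSONDecodeError as error:
        prt('ERROR in distributions.json: {}'.format(error))
        sys.exit(1)
```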
@ -128,6 +106,6 @@ def run_daemon():
if __name__ == '__main__':
args = init_parser()
database = read_os_database()
check_update(database)
distributions = get_distributions()
check_update(distributions)
# run_daemon()
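run_daemon() appears in the hunk header but its body is not shown; given the checked-off "how to make a daemon?" item, a plausible (purely illustrative) shape is a simple polling loop around check_update():

```python
def run_daemon(pause=3600):
    ''' illustrative sketch only: periodically re-run the update check '''
    from time import sleep
    while True:
        check_update(get_distributions())
        sleep(pause)  # seconds between passes; the per-distribution `interval` still applies
```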