#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import print_function

try:
    from itertools import imap
except ImportError:
    # Python 3 removed itertools.imap; the builtin map is already lazy
    imap = map
from operator import itemgetter
import os
import pickle
import platform
import shutil
import sys
from tempfile import NamedTemporaryFile
from time import time

from utils import create_dir
from utils import decode
from utils import is_osx
from utils import is_python3
from utils import move_file
from utils import unico


# minimum age, in seconds, before the data file backup is refreshed (24 hours)
BACKUP_THRESHOLD = 24 * 60 * 60


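# The data file is plain text with one entry per line, in the form
# "<weight>\t<path>", e.g. "10.0\t/home/user"; paths are stored as utf-8.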
def load(config):
    """Return the data dictionary (path -> weight) loaded from disk."""
    xdg_aj_home = os.path.join(
        os.path.expanduser('~'),
        '.local',
        'share',
        'autojump')
    legacy_data_file = os.path.join(xdg_aj_home, 'autojump.txt')

    # Older versions incorrectly used Linux XDG_DATA_HOME paths on OS X
    if is_osx() and os.path.exists(xdg_aj_home):
        migrate_osx_xdg_data(config)
    elif os.path.exists(legacy_data_file):
        return migrate_legacy_data(config)
    elif os.path.exists(config['data_file']):
        return load_pickle(config)

    if os.path.exists(config['data_path']):
        try:
            if is_python3():
                with open(config['data_path'], 'r', encoding='utf-8') as f:
                    lines = f.readlines()
            else:
                with open(config['data_path'], 'r') as f:
                    lines = f.readlines()
        except (IOError, EOFError):
            return load_backup(config)

        # example: '10.0\t/home/user\n' -> ['10.0', '/home/user']
        parse = lambda x: x.strip().split('\t')

        # example: ['10.0', '/home/user'] -> (u'/home/user', 10.0)
        convert = lambda x: (decode(x[1], 'utf-8'), float(x[0]))

        return dict(imap(convert, imap(parse, lines)))
    return {}


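# load() falls back to this when only the old pickle-format data file
# (config['data_file']) exists.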
def load_pickle(config):
    with open(config['data_file'], 'rb') as f:
        data = pickle.load(f)
    return data


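# Called when the primary data file is unreadable: restore the backup copy
# and retry the load.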
def load_backup(config):
    if os.path.exists(config['data_backup_path']):
        move_file(config['data_backup_path'], config['data_path'])
        return load(config)
    return {}


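# Migration helpers: convert data files from older autojump layouts and
# formats over to the current location and format.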
def migrate_legacy_data(config):
    """Migrate data from the legacy autojump.txt format into the new data file."""
    xdg_data_home = os.path.join(os.path.expanduser('~'), '.local', 'share')
    xdg_aj_home = os.path.join(xdg_data_home, 'autojump')
    legacy_data = os.path.join(xdg_aj_home, 'autojump.txt')
    legacy_data_backup = os.path.join(xdg_aj_home, 'autojump.bak')

    # migrate to new file format
    data = load_legacy(legacy_data, legacy_data_backup)
    save(config, data)

    # cleanup
    if os.path.exists(legacy_data):
        os.remove(legacy_data)
    if os.path.exists(legacy_data_backup):
        os.remove(legacy_data_backup)

    return data


def migrate_osx_xdg_data(config):
    """
    Older versions incorrectly used Linux XDG_DATA_HOME paths on OS X. This
    migrates autojump files from ~/.local/share/autojump to ~/Library/autojump
    """
    assert is_osx(), "Expecting OS X."

    xdg_data_home = os.path.join(os.path.expanduser('~'), '.local', 'share')
    xdg_aj_home = os.path.join(xdg_data_home, 'autojump')
    assert os.path.exists(xdg_aj_home), "$XDG_DATA_HOME doesn't exist."

    data_path = os.path.join(xdg_aj_home, 'autojump.txt')
    data_backup_path = os.path.join(xdg_aj_home, 'autojump.txt.bak')

    if os.path.exists(data_path):
        move_file(data_path, config['data_path'])
    if os.path.exists(data_backup_path):
        move_file(data_backup_path, config['data_backup_path'])

    # cleanup
    shutil.rmtree(xdg_aj_home)
    if len(os.listdir(xdg_data_home)) == 0:
        shutil.rmtree(xdg_data_home)


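# The legacy file uses the same tab-separated "<weight>\t<path>" layout; these
# readers exist so migrate_legacy_data() can import it from its old location.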
def load_legacy(data_file, data_file_backup):
    """Loads data from the legacy data file."""
    try:
        if is_python3():
            with open(data_file, 'r', encoding='utf-8') as f:
                lines = f.readlines()
        else:
            with open(data_file, 'r') as f:
                lines = f.readlines()
    except (IOError, EOFError):
        return load_legacy_backup(data_file, data_file_backup)

    # example: '10.0\t/home/user\n' -> ['10.0', '/home/user']
    parse = lambda x: x.strip().split('\t')

    # example: ['10.0', '/home/user'] -> (u'/home/user', 10.0)
    convert = lambda x: (decode(x[1], 'utf-8'), float(x[0]))

    return dict(imap(convert, imap(parse, lines)))


def load_legacy_backup(data_file, data_file_backup):
    """Loads data from backup data file."""
    if data_file_backup and os.path.exists(data_file_backup):
        shutil.move(data_file_backup, data_file)
        return load_legacy(data_file, None)
    return {}


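# save() never writes the data file in place: entries go to a NamedTemporaryFile
# in the same directory first, which is then moved over config['data_path'], so a
# crash mid-write cannot leave a truncated data file behind.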
def save(config, data):
    """Save data and create backup, creating a new data file if necessary."""
    create_dir(os.path.dirname(config['data_path']))

    # atomically save by writing to temporary file and moving to destination
    temp_file = NamedTemporaryFile(
        dir=os.path.dirname(config['data_path']),
        delete=False)

    try:
        # write entries sorted by weight, highest weight first
        for path, weight in sorted(
                data.items(),
                key=itemgetter(1),
                reverse=True):
            temp_file.write(unico("%s\t%s\n" % (weight, path)).encode('utf-8'))

        # flush and fsync so the data actually reaches the disk before the move:
        # http://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
        temp_file.flush()
        os.fsync(temp_file)
        temp_file.close()
    except IOError as ex:
        print("Error saving autojump data (disk full?): %s" % ex, file=sys.stderr)
        sys.exit(1)

    # if no backup file or backup file is older than 24 hours,
    # move autojump.txt -> autojump.txt.bak
    if not os.path.exists(config['data_backup_path']) or \
            (time() - os.path.getmtime(config['data_backup_path']) > BACKUP_THRESHOLD):
        move_file(config['data_path'], config['data_backup_path'])

    # move temp_file -> autojump.txt
    move_file(temp_file.name, config['data_path'])
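

# Hypothetical usage sketch (the config dict is built by the caller; the key
# names below are the ones this module actually reads, the values are made up):
#
#     config = {
#         'data_path': '<data dir>/autojump.txt',            # current text data file
#         'data_backup_path': '<data dir>/autojump.txt.bak',
#         'data_file': '<data dir>/autojump.db',              # old pickle-format file
#     }
#     entries = load(config)                  # e.g. {u'/home/user': 10.0, ...}
#     entries['/home/user'] = entries.get('/home/user', 0) + 1
#     save(config, entries)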