Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
70 changes: 70 additions & 0 deletions profiling/borg_job_overhead.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
"""Profile the overhead of Vorta's Borg job wrappers.

Sets up a throwaway dev-mode Vorta profile, initializes an unencrypted
Borg repository under ``<output>/repo``, and runs one backup of
``<input>`` so a profiler can measure the wrapper overhead around
``borg init`` and ``borg create``.

Usage:
    python borg_job_overhead.py -i <source-dir> -o <work-dir>
"""
import argparse
from os import path

from peewee import SqliteDatabase

from vorta import config
from vorta.application import VortaApp
from vorta.borg.create import BorgCreateJob
from vorta.borg.init import BorgInitJob
from vorta.store.connection import init_db
from vorta.store.models import BackupProfileModel, RepoModel, SourceFileModel

if __name__ == "__main__":
    parser = argparse.ArgumentParser('borg_job_overhead')
    parser.add_argument('--input', '-i', action='store', help='The file source')
    parser.add_argument('--output', '-o', action='store', help='The resulting repo')
    args = parser.parse_args()

    # Make sure this profile is empty for reproducible results
    config.init_dev_mode(path.join(args.output, 'profile'))
    repo_path = path.join(args.output, 'repo')

    sqlite_db = SqliteDatabase(
        config.SETTINGS_DIR / 'settings.db',
        pragmas={
            'journal_mode': 'wal',
        },
    )
    init_db(sqlite_db)

    app = VortaApp([], False)

    # Record the repo under the path borg actually initializes (repo_path),
    # so the DB row matches the repo_url handed to the jobs below.
    # (Previously this used args.output, which is the parent work dir.)
    repo, _ = RepoModel.get_or_create(
        url=repo_path, defaults={'name': 'test', 'extra_borg_arguments': [], 'encryption': 'none'}
    )
    # Model.create() takes field values directly; 'defaults=' is only a
    # get_or_create() keyword and would leave the repo FK unset here.
    profile = BackupProfileModel.create(name='t1', repo=repo)

    # Shared parameter dict consumed by both BorgInitJob and BorgCreateJob.
    job_param = {
        'extra_borg_arguments': [],
        'profile_name': 't1',
        'profile': profile,
        'profile_id': profile.id,
        'repo_url': repo_path,
        'encryption': 'none',
        'repo_name': 'test',
        'repo_id': repo.id,
    }

    # Initialize an unencrypted repo (encryption would skew profiling).
    cmd = ['borg', 'init', '--info', '--log-json', '--encryption=none', repo_path]
    job_param['cmd'] = cmd
    job = BorgInitJob(cmd, job_param, 't1')
    job.run()

    # Single archive 'a1' of the input tree; lz4 keeps compression cheap
    # so the measured time reflects wrapper overhead, not compression.
    cmd = [
        'borg',
        'create',
        '--list',
        '--progress',
        '--info',
        '--log-json',
        '--json',
        '--filter=AM',
        '-C',
        'lz4',
        f'{repo_path}::a1',
        args.input,
    ]
    job_param['cmd'] = cmd
    job = BorgCreateJob(cmd, job_param, 't1')
    job.run()
2 changes: 1 addition & 1 deletion src/vorta/application.py
Original file line number Diff line number Diff line change
Expand Up @@ -300,7 +300,7 @@ def bootstrap_profile(self, bootstrap_file=None):
double_newline,
str(exception),
double_newline,
self.tr('Consider removing or repairing this file to ' 'get rid of this message.'),
self.tr('Consider removing or repairing this file to get rid of this message.'),
),
)
return
Expand Down
127 changes: 67 additions & 60 deletions src/vorta/borg/borg_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,69 +263,76 @@ def run(self):
os.set_blocking(p.stdout.fileno(), False)
os.set_blocking(p.stderr.fileno(), False)

def read_async(fd):
try:
return fd.read()
except (IOError, TypeError):
return ''

stdout = []
while True:
# Wait for new output
select.select([p.stdout, p.stderr], [], [], 0.1)

stdout.append(read_async(p.stdout))
stderr = read_async(p.stderr)
if stderr:
for line in stderr.split('\n'):
try:
parsed = json.loads(line)

if parsed['type'] == 'log_message':
context = {
'msgid': parsed.get('msgid'),
'repo_url': self.params['repo_url'],
'profile_name': self.params.get('profile_name'),
'cmd': self.params['cmd'][1],
}
self.app.backup_log_event.emit(
f'[{self.params["profile_name"]}] {parsed["levelname"]}: {parsed["message"]}', context
)
level_int = getattr(logging, parsed["levelname"])
logger.log(level_int, parsed["message"])

if level_int >= logging.WARNING:
# Append log to list of error messages
error_messages.append((level_int, parsed["message"]))

elif parsed['type'] == 'file_status':
self.app.backup_log_event.emit(
f'[{self.params["profile_name"]}] {parsed["path"]} ({parsed["status"]})', {}
)
elif parsed['type'] == 'progress_percent' and parsed.get("message"):
self.app.backup_log_event.emit(f'[{self.params["profile_name"]}] {parsed["message"]}', {})
elif parsed['type'] == 'archive_progress' and not parsed.get('finished', False):
msg = (
f"{translate('BorgJob','Files')}: {parsed['nfiles']}, "
f"{translate('BorgJob','Original')}: {pretty_bytes(parsed['original_size'])}, "
# f"{translate('BorgJob','Compressed')}: {pretty_bytes(parsed['compressed_size'])}, "
f"{translate('BorgJob','Deduplicated')}: {pretty_bytes(parsed.get('deduplicated_size', 0))}" # noqa: E501
)
self.app.backup_progress_event.emit(f"[{self.params['profile_name']}] {msg}")
except json.decoder.JSONDecodeError:
msg = line.strip()
if msg: # Log only if there is something to log.
self.app.backup_log_event.emit(f'[{self.params["profile_name"]}] {msg}', {})
logger.warning(msg)

if p.poll() is not None:
time.sleep(0.1)
stdout.append(read_async(p.stdout))
break

fds = [p.stdout, p.stderr]
# Run until both stderr and stdout have been fully drained
# As they hit EOF they are removed from the list
while fds:
readable, _, _ = select.select(fds, [], [])

if p.stdout in readable:
out = p.stdout.read()
if out:
stdout.append(out)
else:
fds.remove(p.stdout)

if p.stderr in readable:
stderr = p.stderr.read()
if stderr:
for line in stderr.split('\n'):
try:
parsed = json.loads(line)

if parsed['type'] == 'log_message':
context = {
'msgid': parsed.get('msgid'),
'repo_url': self.params['repo_url'],
'profile_name': self.params.get('profile_name'),
'cmd': self.params['cmd'][1],
}
self.app.backup_log_event.emit(
f'[{self.params["profile_name"]}] {parsed["levelname"]}: {parsed["message"]}',
context,
)
level_int = getattr(logging, parsed["levelname"])
logger.log(level_int, parsed["message"])

if level_int >= logging.WARNING:
# Append log to list of error messages
error_messages.append((level_int, parsed["message"]))

elif parsed['type'] == 'file_status':
self.app.backup_log_event.emit(
f'[{self.params["profile_name"]}] {parsed["path"]} ({parsed["status"]})', {}
)
elif parsed['type'] == 'progress_percent' and parsed.get("message"):
self.app.backup_log_event.emit(
f'[{self.params["profile_name"]}] {parsed["message"]}', {}
)
elif parsed['type'] == 'archive_progress' and not parsed.get('finished', False):
msg = (
f"{translate('BorgJob', 'Files')}: {parsed['nfiles']}, "
f"{translate('BorgJob', 'Original')}: {pretty_bytes(parsed['original_size'])}, "
# f"{translate('BorgJob','Compressed')}: {pretty_bytes(parsed['compressed_size'])}, " # noqa: E501
f"{translate('BorgJob', 'Deduplicated')}: {pretty_bytes(parsed.get('deduplicated_size', 0))}" # noqa: E501
)
self.app.backup_progress_event.emit(f"[{self.params['profile_name']}] {msg}")
except json.decoder.JSONDecodeError:
msg = line.strip()
if msg: # Log only if there is something to log.
self.app.backup_log_event.emit(f'[{self.params["profile_name"]}] {msg}', {})
logger.warning(msg)
else:
fds.remove(p.stderr)

# stdout and stderr both returned an EOF so now we just wait for the process to exit
returncode = p.wait()

result = {
'params': self.params,
'returncode': self.process.returncode,
'returncode': returncode,
'cmd': self.cmd,
'errors': error_messages,
}
Expand All @@ -336,7 +343,7 @@ def read_async(fd):
except ValueError:
result['data'] = stdout

log_entry.returncode = p.returncode
log_entry.returncode = returncode
log_entry.repo_url = self.params.get('repo_url', None)
log_entry.end_time = dt.now()
with db_lock:
Expand Down
2 changes: 1 addition & 1 deletion src/vorta/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -372,7 +372,7 @@ def set_timer_for_profile(self, profile_id: int) -> None:
else:
# int to big to pass it to qt which expects a c++ int
# wait 15 min for regular reschedule
logger.debug(f"Couldn't schedule for {next_time} because " f"timer value {timer_ms} too large.")
logger.debug(f"Couldn't schedule for {next_time} because timer value {timer_ms} too large.")

self.timers[profile_id] = {
'dt': next_time,
Expand Down
2 changes: 1 addition & 1 deletion src/vorta/store/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def get_misc_settings() -> list[dict[str, Any]]:
'type': 'checkbox',
'label': trans_late(
'settings',
"If the system tray isn't available, " "ask whether to continue in the background " "on exit",
"If the system tray isn't available, ask whether to continue in the background on exit",
),
},
{
Expand Down
20 changes: 18 additions & 2 deletions tests/network_manager/test_darwin.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@

pytestmark = pytest.mark.skipif(sys.platform != 'darwin', reason="macOS only")

from vorta.network_status import darwin # noqa: E402


def test_get_current_wifi_when_wifi_is_on(mocker):
from vorta.network_status import darwin # noqa: E402

mock_interface = MagicMock()
mock_network = MagicMock()
mock_interface.lastNetworkJoined.return_value = mock_network
Expand All @@ -23,6 +23,8 @@ def test_get_current_wifi_when_wifi_is_on(mocker):


def test_get_current_wifi_when_wifi_is_off(mocker):
from vorta.network_status import darwin # noqa: E402

mock_interface = MagicMock()
mock_interface.lastNetworkJoined.return_value = None

Expand All @@ -35,6 +37,8 @@ def test_get_current_wifi_when_wifi_is_off(mocker):


def test_get_current_wifi_when_no_wifi_interface(mocker):
from vorta.network_status import darwin # noqa: E402

instance = darwin.DarwinNetworkStatus()
mocker.patch.object(instance, "_get_wifi_interface", return_value=None)

Expand All @@ -45,6 +49,8 @@ def test_get_current_wifi_when_no_wifi_interface(mocker):

@pytest.mark.parametrize("is_hotspot_enabled", [True, False])
def test_network_is_metered_with_ios(mocker, is_hotspot_enabled):
from vorta.network_status import darwin # noqa: E402

mock_interface = MagicMock()
mock_network = MagicMock()
mock_interface.lastNetworkJoined.return_value = mock_network
Expand All @@ -59,6 +65,8 @@ def test_network_is_metered_with_ios(mocker, is_hotspot_enabled):


def test_network_is_metered_when_wifi_is_off(mocker):
from vorta.network_status import darwin # noqa: E402

mock_interface = MagicMock()
mock_interface.lastNetworkJoined.return_value = None

Expand All @@ -78,6 +86,8 @@ def test_network_is_metered_when_wifi_is_off(mocker):
],
)
def test_is_network_metered_with_android(getpacket_output_name, expected, monkeypatch):
from vorta.network_status import darwin # noqa: E402

def mock_getpacket(device):
assert device == 'en0'
return GETPACKET_OUTPUTS[getpacket_output_name]
Expand All @@ -89,6 +99,8 @@ def mock_getpacket(device):


def test_get_known_wifi_networks_when_wifi_interface_exists(monkeypatch):
from vorta.network_status import darwin # noqa: E402

networksetup_output = """
Preferred networks on en0:
Home Network
Expand All @@ -109,6 +121,8 @@ def test_get_known_wifi_networks_when_wifi_interface_exists(monkeypatch):


def test_get_known_wifi_networks_when_no_wifi_interface(mocker):
from vorta.network_status import darwin # noqa: E402

instance = darwin.DarwinNetworkStatus()
mocker.patch.object(instance, "_get_wifi_interface", return_value=None)

Expand All @@ -118,6 +132,8 @@ def test_get_known_wifi_networks_when_no_wifi_interface(mocker):


def test_get_network_devices(monkeypatch):
from vorta.network_status import darwin # noqa: E402

monkeypatch.setattr(darwin, 'call_networksetup_listallhardwareports', lambda: NETWORKSETUP_OUTPUT)

result = list(darwin.get_network_devices())
Expand Down
Loading
Loading