cache lines to speed up rendering

2024-04-23 10:28:44 +02:00
parent aa2bfa967e
commit be53c209ea


@@ -7,6 +7,7 @@ from src.ui.bigtext.highlighting import Highlighting
 from src.ui.bigtext.line import Line
 import os
 from src.settings.settings import Settings
+from functools import lru_cache


 class LogFileModel:
@@ -21,6 +22,8 @@ class LogFileModel:
     range_start = 0
     range_end = -1
+
+    _line_cache = {}

     def __init__(self, file: str, settings: Settings, original_file: str = False):
         """
         :param file:
@@ -133,6 +136,13 @@ class LogFileModel:
     def _is_word_char(self, char: str) -> bool:
         return re.match(r"\w", char) is not None
+
+    def prune_cache(self, range_start: int, range_end: int):
+        for key in list(self._line_cache.keys()):
+            line = self._line_cache[key]
+            if range_start > line.byte_end() or line.byte_offset() > range_end:
+                del self._line_cache[key]

     def data(self, byte_offset: int, scroll_lines: int, lines: int, range_start: int, range_end: int) -> List[Line]:
         # print("data(%s, %s, %s)" % (byte_offset, scroll_lines, lines))
         lines_before_offset: List[Line] = []
@@ -148,19 +158,36 @@ class LogFileModel:
             offset = max(0,
                          max(range_start - self.settings.max_line_length(), offset - self.settings.max_line_length()))

+            self.prune_cache(range_start, range_end)
+            previous_line_is_complete = False
             f.seek(offset)
-            while l := f.readline():
-                new_offset = f.tell()
-                if 0 <= range_end < new_offset:
-                    break
-                line = Line(offset, new_offset, l.decode("utf8", errors="ignore"))
+            while True:
+                line: Line | None = self._line_cache.get(offset)
+                if line is None:
+                    line_bytes = f.readline()
+                    if not line_bytes:
+                        break
+                    new_offset = f.tell()
+                    if 0 <= range_end < new_offset:
+                        break
+                    line = Line(offset, new_offset, line_bytes.decode("utf8", errors="ignore"))
+                    if previous_line_is_complete:  # only cache lines when we know they are complete
+                        self._line_cache[offset] = line
+                    offset = new_offset
+                    previous_line_is_complete = True
+                else:
+                    # print(f"loaded cached line at offset {offset}")
+                    offset = line.byte_end()  # line.byte_end() returns the end byte +1
+                    f.seek(offset)
+                    previous_line_is_complete = True
                 if line.byte_end() <= byte_offset:  # line.byte_end() returns the end byte +1
                     if line.byte_offset() >= range_start:  # only add if in range
                         lines_before_offset.append(line)
                 else:
                     lines_after_offset.append(line)
-                offset = f.tell()
                 if len(lines_after_offset) >= lines_to_find:
                     break
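Restating the caching scheme outside the diff: Line objects are keyed by the byte offset where the line starts, the cache is pruned to the requested byte range before each read, and a line is only cached when its start offset is known to be a true line boundary (the previous_line_is_complete guard). Below is a minimal, self-contained sketch of that idea. Line, LineCache and read_lines here are simplified stand-ins invented for illustration, not the project's actual API, and the sketch assumes reads always start on a line boundary, so the completeness guard is dropped.

# Hypothetical sketch -- simplified stand-ins for the project's classes.
from typing import Dict, List, Optional


class Line:
    def __init__(self, byte_offset: int, byte_end: int, text: str):
        self._byte_offset = byte_offset  # offset of the line's first byte
        self._byte_end = byte_end        # one past the line's last byte
        self.text = text

    def byte_offset(self) -> int:
        return self._byte_offset

    def byte_end(self) -> int:
        return self._byte_end


class LineCache:
    def __init__(self) -> None:
        self._lines: Dict[int, Line] = {}  # keyed by the line's start offset

    def get(self, offset: int) -> Optional[Line]:
        return self._lines.get(offset)

    def put(self, line: Line) -> None:
        self._lines[line.byte_offset()] = line

    def prune(self, range_start: int, range_end: int) -> None:
        # Evict every cached line that lies entirely outside [range_start, range_end],
        # the same condition as prune_cache() in the diff above.
        for offset in list(self._lines.keys()):
            line = self._lines[offset]
            if range_start > line.byte_end() or line.byte_offset() > range_end:
                del self._lines[offset]


def read_lines(path: str, cache: LineCache, start_offset: int, max_lines: int) -> List[Line]:
    # Assumes start_offset is a real line boundary, so every parsed line can be
    # cached without the previous_line_is_complete bookkeeping from the diff.
    result: List[Line] = []
    offset = start_offset
    with open(path, "rb") as f:
        f.seek(offset)
        while len(result) < max_lines:
            line = cache.get(offset)
            if line is None:
                raw = f.readline()
                if not raw:
                    break
                line = Line(offset, f.tell(), raw.decode("utf8", errors="ignore"))
                cache.put(line)
                offset = f.tell()
            else:
                offset = line.byte_end()
                f.seek(offset)  # skip bytes already parsed on a previous call
            result.append(line)
    return result

On a repeated call over the same range, every line comes back from the cache, so the file is only seeked past already-parsed bytes instead of being re-read and re-decoded, which is where the rendering speedup comes from.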