try to fix ui freeze when filtering huge files
@@ -1,5 +1,6 @@
import math
import threading
import time
from typing import List
from line import Line
import os
@@ -7,11 +8,11 @@ from settings import Settings


class LogFileModel:
    _lock = threading.RLock()

    def __init__(self, file:str, settings: Settings):
        self.settings = settings
        self._file = os.path.realpath(file)
        self._lock = threading.RLock()

    def get_file(self):
        return self._file
@@ -36,11 +37,12 @@
        #print("data(%s, %s, %s)" % (byte_offset, scroll_lines, lines))
        lines_before_offset: List[Line] = []
        lines_after_offset: List[Line] = []
        result: List[Line] = []
        lines_to_find = lines + abs(scroll_lines)
        lines_to_return = math.ceil(lines)

        #start = time.time()
        with self._lock:
            #print("data lock acquisition %.4f" % (time.time() - start))
            # TODO handle lines longer than 4096 bytes
            # TODO abort file open after a few seconds: https://docs.python.org/3/library/signal.html#example
            with open(self._file, 'rb') as f:
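The lock-guarded read above is the core of the change: filtering a huge file can run off the UI thread while the RLock serializes access to the underlying file. A rough sketch of how a caller might use that, assuming only the _lock and get_file() members shown in the diff (the FilterWorker class, the pattern argument, and the on_done callback are illustrative, not from the repository):

import threading


class FilterWorker:
    """Illustrative sketch: run a slow substring filter off the UI thread,
    reusing the model's RLock so reads don't race with LogFileModel.data()."""

    def __init__(self, model, pattern: bytes, on_done):
        self._model = model        # assumed to expose ._lock and .get_file()
        self._pattern = pattern    # bytes, because the model opens the file in 'rb' mode
        self._on_done = on_done    # callback that receives the matching lines

    def start(self):
        # Daemon thread: a long-running filter never blocks interpreter shutdown.
        threading.Thread(target=self._run, daemon=True).start()

    def _run(self):
        matches = []
        with self._model._lock:    # serialize with other readers of the file
            with open(self._model.get_file(), 'rb') as f:
                for raw_line in f:
                    if self._pattern in raw_line:
                        matches.append(raw_line)
        self._on_done(matches)     # hand results back, e.g. via the UI toolkit's event queue

Handing results back through a callback (or the toolkit's own post-to-main-thread mechanism) keeps the UI responsive while the file is being scanned.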
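The second TODO links to the timeout example in the Python signal docs; applied to this code it would look roughly like the following (POSIX-only, main-thread-only, and the 5-second limit and file path are arbitrary placeholders):

import signal


def _timeout_handler(signum, frame):
    raise TimeoutError("opening/reading the log file took too long")


# SIGALRM exists only on POSIX, and the handler must be installed in the main thread.
signal.signal(signal.SIGALRM, _timeout_handler)
signal.alarm(5)                    # raise TimeoutError after ~5 seconds
try:
    with open('/var/log/huge.log', 'rb') as f:   # hypothetical path
        chunk = f.read(4096)
finally:
    signal.alarm(0)                # always cancel the pending alarm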