history: update for database changes

Signed-off-by: Eric Callahan <arksine.code@gmail.com>
This commit is contained in:
Eric Callahan 2022-01-30 20:38:54 -05:00
parent 9e57db0611
commit c081fa49a1

View File

@ -4,6 +4,7 @@
from __future__ import annotations from __future__ import annotations
import time import time
from asyncio import Lock
# Annotation imports # Annotation imports
from typing import ( from typing import (
@ -17,11 +18,9 @@ from typing import (
if TYPE_CHECKING: if TYPE_CHECKING:
from confighelper import ConfigHelper from confighelper import ConfigHelper
from websockets import WebRequest from websockets import WebRequest
from . import database from .database import MoonrakerDatabase as DBComp
from .job_state import JobState from .job_state import JobState
from .file_manager import file_manager from .file_manager.file_manager import FileManager
DBComp = database.MoonrakerDatabase
FMComp = file_manager.FileManager
HIST_NAMESPACE = "history" HIST_NAMESPACE = "history"
MAX_JOBS = 10000 MAX_JOBS = 10000
@ -29,10 +28,10 @@ MAX_JOBS = 10000
class History: class History:
def __init__(self, config: ConfigHelper) -> None: def __init__(self, config: ConfigHelper) -> None:
self.server = config.get_server() self.server = config.get_server()
self.file_manager: FMComp = self.server.lookup_component( self.file_manager: FileManager = self.server.lookup_component(
'file_manager') 'file_manager')
self.request_lock = Lock()
database: DBComp = self.server.lookup_component("database") database: DBComp = self.server.lookup_component("database")
self.gcdb = database.wrap_namespace("gcode_metadata", parse_keys=False)
self.job_totals: Dict[str, float] = database.get_item( self.job_totals: Dict[str, float] = database.get_item(
"moonraker", "history.job_totals", "moonraker", "history.job_totals",
{ {
@ -42,7 +41,7 @@ class History:
'total_filament_used': 0., 'total_filament_used': 0.,
'longest_job': 0., 'longest_job': 0.,
'longest_print': 0. 'longest_print': 0.
}) }).result()
self.server.register_event_handler( self.server.register_event_handler(
"server:klippy_disconnect", self._handle_disconnect) "server:klippy_disconnect", self._handle_disconnect)
@ -77,93 +76,95 @@ class History:
self.current_job: Optional[PrinterJob] = None self.current_job: Optional[PrinterJob] = None
self.current_job_id: Optional[str] = None self.current_job_id: Optional[str] = None
self.next_job_id: int = 0 self.next_job_id: int = 0
self.cached_job_ids = self.history_ns.keys() self.cached_job_ids = self.history_ns.keys().result()
if self.cached_job_ids: if self.cached_job_ids:
self.next_job_id = int(self.cached_job_ids[-1], 16) + 1 self.next_job_id = int(self.cached_job_ids[-1], 16) + 1
async def _handle_job_request(self, async def _handle_job_request(self,
web_request: WebRequest web_request: WebRequest
) -> Dict[str, Any]: ) -> Dict[str, Any]:
action = web_request.get_action() async with self.request_lock:
if action == "GET": action = web_request.get_action()
job_id = web_request.get_str("uid") if action == "GET":
if job_id not in self.cached_job_ids: job_id = web_request.get_str("uid")
raise self.server.error(f"Invalid job uid: {job_id}", 404) if job_id not in self.cached_job_ids:
job = self.history_ns[job_id] raise self.server.error(f"Invalid job uid: {job_id}", 404)
return {"job": self._prep_requested_job(job, job_id)} job = await self.history_ns[job_id]
if action == "DELETE": return {"job": self._prep_requested_job(job, job_id)}
all = web_request.get_boolean("all", False) if action == "DELETE":
if all: all = web_request.get_boolean("all", False)
deljobs = self.cached_job_ids if all:
self.history_ns.clear() deljobs = self.cached_job_ids
self.cached_job_ids = [] self.history_ns.clear()
self.next_job_id = 0 self.cached_job_ids = []
return {'deleted_jobs': deljobs} self.next_job_id = 0
return {'deleted_jobs': deljobs}
job_id = web_request.get_str("uid") job_id = web_request.get_str("uid")
if job_id not in self.cached_job_ids: if job_id not in self.cached_job_ids:
raise self.server.error(f"Invalid job uid: {job_id}", 404) raise self.server.error(f"Invalid job uid: {job_id}", 404)
self.delete_job(job_id) self.delete_job(job_id)
return {'deleted_jobs': [job_id]} return {'deleted_jobs': [job_id]}
raise self.server.error("Invalid Request Method") raise self.server.error("Invalid Request Method")
async def _handle_jobs_list(self, async def _handle_jobs_list(self,
web_request: WebRequest web_request: WebRequest
) -> Dict[str, Any]: ) -> Dict[str, Any]:
i = 0 async with self.request_lock:
count = 0 i = 0
end_num = len(self.cached_job_ids) count = 0
jobs: List[Dict[str, Any]] = [] end_num = len(self.cached_job_ids)
start_num = 0 jobs: List[Dict[str, Any]] = []
start_num = 0
before = web_request.get_float("before", -1) before = web_request.get_float("before", -1)
since = web_request.get_float("since", -1) since = web_request.get_float("since", -1)
limit = web_request.get_int("limit", 50) limit = web_request.get_int("limit", 50)
start = web_request.get_int("start", 0) start = web_request.get_int("start", 0)
order = web_request.get_str("order", "desc") order = web_request.get_str("order", "desc")
if order not in ["asc", "desc"]: if order not in ["asc", "desc"]:
raise self.server.error(f"Invalid `order` value: {order}", 400) raise self.server.error(f"Invalid `order` value: {order}", 400)
reverse_order = (order == "desc") reverse_order = (order == "desc")
# cached jobs is asc order, find lower and upper boundary # cached jobs is asc order, find lower and upper boundary
if since != -1: if since != -1:
while start_num < end_num: while start_num < end_num:
job_id = self.cached_job_ids[start_num] job_id = self.cached_job_ids[start_num]
job: Dict[str, Any] = self.history_ns[job_id] job: Dict[str, Any] = await self.history_ns[job_id]
if job['start_time'] > since: if job['start_time'] > since:
break break
start_num += 1 start_num += 1
if before != -1: if before != -1:
while end_num > 0: while end_num > 0:
job_id = self.cached_job_ids[end_num-1] job_id = self.cached_job_ids[end_num-1]
job = self.history_ns[job_id] job = await self.history_ns[job_id]
if job['end_time'] < before: if job['end_time'] < before:
break break
end_num -= 1 end_num -= 1
if start_num >= end_num or end_num == 0: if start_num >= end_num or end_num == 0:
return {"count": 0, "jobs": []} return {"count": 0, "jobs": []}
i = start i = start
count = end_num - start_num count = end_num - start_num
if limit == 0: if limit == 0:
limit = MAX_JOBS limit = MAX_JOBS
while i < count and len(jobs) < limit: while i < count and len(jobs) < limit:
if reverse_order: if reverse_order:
job_id = self.cached_job_ids[end_num - i - 1] job_id = self.cached_job_ids[end_num - i - 1]
else: else:
job_id = self.cached_job_ids[start_num + i] job_id = self.cached_job_ids[start_num + i]
job = self.history_ns[job_id] job = await self.history_ns[job_id]
jobs.append(self._prep_requested_job(job, job_id)) jobs.append(self._prep_requested_job(job, job_id))
i += 1 i += 1
return {"count": count, "jobs": jobs} return {"count": count, "jobs": jobs}
async def _handle_job_totals(self, async def _handle_job_totals(self,
web_request: WebRequest web_request: WebRequest
@ -186,7 +187,7 @@ class History:
'longest_print': 0. 'longest_print': 0.
} }
database: DBComp = self.server.lookup_component("database") database: DBComp = self.server.lookup_component("database")
database.insert_item( await database.insert_item(
"moonraker", "history.job_totals", self.job_totals) "moonraker", "history.job_totals", self.job_totals)
return {'last_totals': last_totals} return {'last_totals': last_totals}
@ -275,16 +276,19 @@ class History:
self.current_job = None self.current_job = None
self.current_job_id = None self.current_job_id = None
async def get_job(self,
                  job_id: Union[int, str]
                  ) -> Optional[Dict[str, Any]]:
    """Fetch a stored history entry, or None if the id is unknown.

    Integer ids are converted to the zero-padded uppercase hex form
    used as keys in the history namespace.
    """
    uid = format(job_id, "06X") if isinstance(job_id, int) else job_id
    return await self.history_ns.get(uid, None)
def grab_job_metadata(self) -> None: def grab_job_metadata(self) -> None:
if self.current_job is None: if self.current_job is None:
return return
filename: str = self.current_job.get("filename") filename: str = self.current_job.get("filename")
metadata: Dict[str, Any] = self.gcdb.get(filename, {}) mdst = self.file_manager.get_metadata_storage()
metadata: Dict[str, Any] = mdst.get(filename, {})
if metadata: if metadata:
# Add the start time and job id to the # Add the start time and job id to the
# persistent metadata storage # persistent metadata storage
@ -292,7 +296,7 @@ class History:
'print_start_time': self.current_job.get('start_time'), 'print_start_time': self.current_job.get('start_time'),
'job_id': self.current_job_id 'job_id': self.current_job_id
}) })
self.gcdb[filename] = metadata mdst.insert(filename, metadata.copy())
# We don't need to store these fields in the # We don't need to store these fields in the
# job metadata, as they are redundant # job metadata, as they are redundant
metadata.pop('print_start_time', None) metadata.pop('print_start_time', None)