SELKIELogger  1.0.0
SLFiles.py
# Copyright (C) 2023 Swansea University
#
# This file is part of the SELKIELogger suite of tools.
#
# SELKIELogger is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# SELKIELogger is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along
# with this SELKIELogger product.
# If not, see <http://www.gnu.org/licenses/>.

import logging
import msgpack
import os
import pandas as pd
import numpy as np

from numbers import Number
from .SLMessages import IDs, SLMessage, SLMessageSink


log = logging.getLogger(__name__)

class VarFile:
    """! Represent a channel mapping (.var) file, caching information as necessary"""

    def __init__(self, filename):
        """!
        Create VarFile instance. Does not open or parse file.
        @param filename File name and path
        """
        ## File name and path
        self._fn = filename

        ## Source/Channel Map
        self._sm = None
    def getSourceMap(self, force=False):
        """!
        Get source/channel map from file, or return from cache if available.
        @param force Read file again, even if source map exists
        @returns SLSourceMap instance
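
        A minimal usage sketch (the file name here is hypothetical):

        ```.py
        vf = VarFile("Log000001.var")
        sm = vf.getSourceMap()
        vf.printSourceMap()
        ```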
        """
        if self._sm and not force:
            log.debug("Returning cached SourceMap")
            return self._sm

        file = open(self._fn, "rb")
        unpacker = msgpack.Unpacker(file, unicode_errors="ignore")
        out = SLMessageSink(msglogger=log.getChild("VarFile"))

        # Seed the map with default names for the data source and converter (us)
        out.Process(SLMessage(0, 0, "Logger").pack())
        out.Process(SLMessage(1, 0, "SLPython").pack())
        for msg in unpacker:
            log.log(5, msg)
            msg = out.Process(msg, output="raw")
        self._sm = out.SourceMap()
        return self._sm

    def printSourceMap(self, fancy=True):
        """!
        Print a source/channel map, optionally using the tools provided in the
        "rich" package to provide a prettier output.

        Falls back to standard print output if `fancy` is False or if the rich
        package is not installed.

        @param fancy Enable/disable use of rich features.
        @returns None
        """
        try:
            from rich.console import Console
            from rich.table import Table
        except ImportError:
            fancy = False

        if not fancy:
            print("Source \tChannels")
            for src in self._sm:
                print(
                    f"0x{src:02x} - {self._sm.GetSourceName(src):16s}{list(self._sm[src])}"
                )
            return

        # We can be fancy!
        t = Table(show_header=True, header_style="bold")
        t.add_column("Source", style="dim", width=6, justify="center")
        t.add_column("Source Name")
        t.add_column("Channel Names")
        for src in self._sm:
            t.add_row(
                f"0x{src:02x}", self._sm.GetSourceName(src), str(list(self._sm[src]))
            )
        Console().print(t)


class DatFile:
    """!
    Represent a SELKIELogger data file and associated common operations.
    """

    def __init__(self, filename, pcs=IDs.SLSOURCE_TIMER):
        """!
        Create DatFile instance. Does not open or parse file.
        @param filename File name and path
        @param pcs Primary Clock Source (Source ID)
        """
        ## File name and path
        self._fn = filename

        ## Cached conversion functions
        self._fields = None

        ## Primary Clock Source ID
        if isinstance(pcs, Number):
            self._pcs = int(pcs)
        else:
            self._pcs = int(pcs, 0)

        ## Source/Channel Map
        self._sm = None

        ## File data records (once parsed)
        self._records = None
    def addSourceMap(self, sm):
        """!
        Associate an SLChannelMap instance with this file. Used to map source
        and channel IDs to names.
        @param sm SLChannelMap (see VarFile.getSourceMap)
        @returns None
        """
        if self._sm:
            log.warning("Overriding existing source map")
        self._sm = sm

    @staticmethod
    def tryParse(value):
        """!
        Attempt to convert value to float. If unsuccessful, parse as JSON and
        attempt to return a float from either a) the sole value within the
        object or b) the data associated with a key named "value".
        @param value Input value
        @returns Floating point value or np.nan on failure
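
        A few illustrative inputs, traced through the logic below (values are
        hypothetical):

        ```.py
        DatFile.tryParse("3.5")                         # 3.5
        DatFile.tryParse('{"value": 5, "units": "m"}')  # 5.0
        DatFile.tryParse('{"depth": 2.1}')              # 2.1 (sole value)
        DatFile.tryParse("not a number")                # nan
        ```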
        """
        try:
            x = float(value)
            return x
        except ValueError:
            pass

        try:
            import json

            x = json.loads(value, parse_int=float, parse_constant=float)
            if len(x) == 1:
                return float(x.popitem()[1])
            elif "value" in x:
                return float(x["value"])
        except Exception:
            return np.nan
        # JSON parsed, but no usable value found
        return np.nan

    def prepConverters(self, force=False, includeTS=False):
        """!
        Generate and cache functions to convert each channel into defined
        fields and corresponding field names suitable for creating a DataFrame
        later.

        Names and functions are returned as a collection keyed by source and
        channel. As each message may produce multiple output fields, names and
        functions are each returned as lists containing a minimum of one entry.

        ```.py
        fields = x.prepConverters()
        names, funcs = fields[source][channel]
        out = {}
        out[names[0]] = funcs[0](input)
        ```
        where `input` is an SLMessage instance.

        @param force Regenerate fields rather than using cached values
        @param includeTS Retain timestamp field as data column as well as index
        @returns Collection of conversion functions and field names
        """
        if self._fields and not force:
            log.debug("Returning cached converters")
            return self._fields

        simpleSources = [x for x in range(IDs.SLSOURCE_I2C, IDs.SLSOURCE_I2C + 0x10)]
        simpleSources += [x for x in range(IDs.SLSOURCE_MP, IDs.SLSOURCE_MP + 0x10)]
        simpleSources += [x for x in range(IDs.SLSOURCE_ADC, IDs.SLSOURCE_ADC + 0x10)]
        simpleSources += [x for x in range(IDs.SLSOURCE_EXT, IDs.SLSOURCE_EXT + 0x07)]

        fields = {}
        for src in self._sm:
            fields[src] = {}
            if src == self._pcs:
                if includeTS:
                    fields[src][0x02] = [
                        [f"Timestamp:0x{self._pcs:02x}"],
                        [lambda x: x.Data],
                    ]
                cid = 0
                for chan in list(self._sm[src]):
                    if cid > IDs.SLCHAN_TSTAMP and chan != "":
                        fields[src][cid] = [[f"{chan}:0x{src:02x}"], [lambda x: x.Data]]
                    cid += 1
            elif src in range(IDs.SLSOURCE_GPS, IDs.SLSOURCE_GPS + 0x10):
                cid = 0
                for chan in list(self._sm[src]):
                    if cid == IDs.SLCHAN_TSTAMP:
                        fields[src][cid] = [
                            [f"Timestamp:0x{src:02x}"],
                            [lambda x: x.Data],
                        ]
                        cid += 1
                    elif cid in [IDs.SLCHAN_NAME, IDs.SLCHAN_MAP, IDs.SLCHAN_RAW]:
                        # Don't include in frame outputs
                        cid += 1
                        continue
                    elif cid == 4:
                        # Position
                        fields[src][cid] = [
                            [
                                f"Longitude:0x{src:02x}",
                                f"Latitude:0x{src:02x}",
                                f"Height:0x{src:02x}",
                                f"HAcc:0x{src:02x}",
                                f"VAcc:0x{src:02x}",
                            ],
                            [
                                lambda x: x.Data[0],
                                lambda x: x.Data[1],
                                lambda x: x.Data[2],
                                lambda x: x.Data[4],
                                lambda x: x.Data[5],
                            ],
                        ]
                        cid += 1
                    elif cid == 5:
                        # Velocity
                        fields[src][cid] = [
                            [
                                f"Velocity_N:0x{src:02x}",
                                f"Velocity_E:0x{src:02x}",
                                f"Velocity_D:0x{src:02x}",
                                f"SpeedAcc:0x{src:02x}",
                                f"Heading:0x{src:02x}",
                                f"HeadAcc:0x{src:02x}",
                            ],
                            [
                                lambda x: x.Data[0],
                                lambda x: x.Data[1],
                                lambda x: x.Data[2],
                                lambda x: x.Data[5],
                                lambda x: x.Data[4],
                                lambda x: x.Data[6],
                            ],
                        ]
                        cid += 1
                    elif cid == 6:
                        # Date/Time
                        fields[src][cid] = [
                            [
                                f"Date:0x{src:02x}",
                                f"Time:0x{src:02x}",
                                f"DTAcc:0x{src:02x}",
                            ],
                            [
                                lambda x: f"{x.Data[0]:04.0f}-{x.Data[1]:02.0f}-{x.Data[2]:02.0f}",
                                lambda x: f"{x.Data[3]:02.0f}:{x.Data[4]:02.0f}:{x.Data[5]:02.0f}.{x.Data[6]:06.0f}",
                                lambda x: f"{x.Data[7]:09.0f}",
                            ],
                        ]
                        cid += 1
                    elif self._sm[src][cid] == "":
                        cid += 1
                        continue
                    else:
                        fields[src][cid] = [[f"{chan}:0x{src:02x}"], [lambda x: x.Data]]
                        cid += 1
            elif src in range(IDs.SLSOURCE_MQTT, IDs.SLSOURCE_MQTT + 0x07):
                cid = 0
                for chan in list(self._sm[src]):
                    if cid in [IDs.SLCHAN_NAME, IDs.SLCHAN_MAP]:
                        cid += 1
                        continue
                    elif chan == "":
                        cid += 1
                        continue
                    elif cid == IDs.SLCHAN_TSTAMP:
                        fields[src][cid] = [
                            [f"Timestamp:0x{src:02x}"],
                            [lambda x: x.Data],
                        ]
                        cid += 1
                    else:
                        fields[src][cid] = [
                            [f"{chan}:0x{src:02x}"],
                            [lambda x: self.tryParse(x.Data)],
                        ]
                        cid += 1
            elif src in simpleSources:
                cid = 0
                for chan in list(self._sm[src]):
                    if cid in [IDs.SLCHAN_NAME, IDs.SLCHAN_MAP]:  # , IDs.SLCHAN_RAW]:
                        cid += 1
                        continue
                    elif chan == "" or chan == "-":
                        cid += 1
                        continue
                    elif chan == "Raw Data" or (
                        cid == IDs.SLCHAN_RAW and chan.lower().startswith("raw")
                    ):
                        cid += 1
                        continue
                    elif cid == IDs.SLCHAN_TSTAMP:
                        fields[src][cid] = [
                            [f"Timestamp:0x{src:02x}"],
                            [lambda x: x.Data],
                        ]
                        cid += 1
                    else:
                        fields[src][cid] = [[f"{chan}:0x{src:02x}"], [lambda x: x.Data]]
                        cid += 1
            else:
                if src >= 0x02:
                    log.info(
                        f"No conversion routine known for source 0x{src:02x} ({self._sm[src]})"
                    )
        self._fields = fields
        self._columnList = []
        for _, channels in self._fields.items():
            for _, c in channels.items():
                self._columnList.extend(c[0])

        return self._fields

    def buildRecord(self, msgStack):
        """!
        Convert a group (stack) of messages into a single record, keyed by
        field names.

        Field values default to None, and in the event that multiple values
        for a single field are received, the last value received will be
        stored.

        @param msgStack Group of messages received for a specific interval
        @returns Record/dictionary of values keyed by field name
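
        A minimal sketch, assuming a source map has been attached and `stack`
        holds the messages received between two timestamps:

        ```.py
        x.prepConverters()
        record = x.buildRecord(stack)
        # record maps each "Name:0xNN" field to the last value seen, or None
        ```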
        """
        record = {}
        for sid, channels in self._fields.items():
            for cid, converter in channels.items():
                for s in converter[0]:
                    record[s] = None

        for m in msgStack:
            try:
                converter = self._fields[m.SourceID][m.ChannelID]
            except KeyError:
                continue
            ls = converter[0]
            dat = [d(m) for d in converter[1]]
            for x in range(len(ls)):
                record[ls[x]] = dat[x]
        return record

    def messages(self, source=None, channel=None):
        """!
        Process data file and yield messages, optionally restricted to those
        matching a specific source and/or channel ID.

        * x.messages() - Yields all messages
        * x.messages(source=0x10) - Yields all messages from source 0x10 (GPS0)
        * x.messages(channel=0x03) - Yields all channel 3 (raw) messages from any source
        * x.messages(0x10, 0x03) - Yields all channel 3 messages from source 0x10

        @param source Optional: Source ID to match
        @param channel Optional: Channel ID to match
        @returns Yields messages in file order
        """
        datFile = open(self._fn, "rb")
        unpacker = msgpack.Unpacker(datFile, unicode_errors="ignore")
        sink = SLMessageSink(msglogger=log.getChild("Data"))

        sink.Process(SLMessage(0, 0, "Logger").pack())
        sink.Process(SLMessage(1, 0, "SLPython").pack())

        for msg in unpacker:
            msg = sink.Process(msg, output="raw", allMessages=True)
            if msg is None:
                continue

            if source is not None and msg.SourceID != source:
                continue

            if channel is not None and msg.ChannelID != channel:
                continue

            yield msg

        datFile.close()

    def processMessages(self, includeTS=False, force=False, chunkSize=100000):
        """!
        Process messages, grouping them by timestamp and yielding records in
        chunks.
        @param includeTS Passed to prepConverters()
        @param force Passed to prepConverters()
        @param chunkSize Yield records after this many timestamps
        @returns List of tuples containing timestamp and dictionary of records
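
        A consumption sketch (variable names hypothetical):

        ```.py
        for chunk in dat.processMessages():
            for timestamp, record in chunk:
                print(timestamp, record)
        ```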
        """
        if self._records:
            log.error("Some records already cached - discarding")
            del self._records

        fields = self.prepConverters(includeTS=includeTS, force=force)
        log.debug(fields)
        log.debug(f"Primary clock source: 0x{self._pcs:02x} [{self._sm[self._pcs]}]")

        self._records = []
        stack = []
        currentTime = 0
        nextTime = 0
        count = 0
        for msg in self.messages():
            count += 1

            if msg.SourceID == self._pcs and msg.ChannelID == IDs.SLCHAN_TSTAMP:
                nextTime = msg.Data

            if nextTime != currentTime:
                tsdf = self.buildRecord(stack)
                stack.clear()
                currentTime = nextTime
                self._records.extend([(currentTime, tsdf)])
                numTS = len(self._records)
                if (numTS % chunkSize) == 0:
                    yield self._records
                    self._records.clear()
                    tsdf.clear()
                continue

            stack.extend([msg])

        yield self._records

        # Out of messages and no more timestamps available
        log.debug(
            f"Out of data - {len(stack)} messages abandoned beyond last timestamp"
        )

    def yieldDataFrame(self, dropna=False, resample=None, convertEpoch=False):
        """!
        Process file and yield results as dataframes that can be merged later.

        Optionally drop empty records and perform naive averaging over a given
        resample interval.
        @param dropna Drop rows consisting entirely of NaN/None values
        @param resample Resampling interval
        @param convertEpoch Convert epoch timestamps into a date/time column
        @returns pandas.DataFrame representing a chunk of file data
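
        A minimal sketch of chunked processing (names hypothetical):

        ```.py
        for ndf in dat.yieldDataFrame(dropna=True, resample="1s"):
            print(ndf.shape)
        ```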
        """
        count = 0
        lastDT = None
        DTCol = f"DT:0x{self._pcs:02x}"
        EpochCol = f"Epoch:0x{self._pcs:02x}"
        for chunk in self.processMessages():
            ndf = pd.DataFrame(data=[x[1] for x in chunk], index=[x[0] for x in chunk])
            count += len(ndf)
            if resample:
                ndf.index = pd.to_timedelta(ndf.index.values, unit="ms")
                ndf = ndf.resample(resample).mean()

                # Double 'astype' here to ensure we get an integer representing milliseconds back
                ndf.index = [x.astype("m8[ms]").astype(int) for x in ndf.index.values]

            for x in ndf.columns:
                if pd.api.types.is_numeric_dtype(ndf[x].dtype):
                    ndf[x] = ndf[x].astype(pd.SparseDtype(ndf[x].dtype, np.nan))

            ndf = ndf.reindex(columns=self._columnList, copy=False)

            if convertEpoch:
                ndf[DTCol] = pd.to_datetime(
                    ndf[EpochCol].sparse.to_dense(), unit="s", errors="ignore"
                ).interpolate("ffill")

                # Isolate values before first time stamp
                firstValIX = ndf[DTCol].dropna().head(1).index[0]
                preTSVals = ndf.loc[ndf.index.min() : firstValIX, DTCol]
                preTSVals = preTSVals.head(len(preTSVals) - 1)
                if lastDT:
                    ndf.loc[preTSVals.index, DTCol] = lastDT[0] + pd.to_timedelta(
                        preTSVals.index - lastDT[1], unit="ms"
                    )
                else:
                    ndf.loc[preTSVals.index, DTCol] = (
                        ndf[DTCol].dropna().head(1)
                        - pd.to_timedelta(firstValIX - preTSVals.index, unit="ms")
                    ).values

                for l, g in ndf.groupby(DTCol):
                    delta = g.index.values - g.index.values.min()
                    dt = g[DTCol] + pd.to_timedelta(delta, unit="ms")
                    if max(delta) > 1000:
                        print(f"Large interval encountered at {l} [{max(delta)}]")
                    ndf.loc[g.index, DTCol] = dt

                lastDT = ndf[DTCol].dropna().tail(1)
                lastDT = (lastDT.values[0], lastDT.index[0])

            if dropna:
                ndf.dropna(how="all", inplace=True)
            ndf.index.name = "Timestamp"
            yield ndf

    def asDataFrame(self, dropna=False, resample=None, convertEpoch=False):
        """!
        Wrapper around yieldDataFrame.
        Processes all records and merges them into a single frame.
        @param dropna Drop empty records. @see yieldDataFrame()
        @param resample Resampling interval. @see yieldDataFrame()
        @param convertEpoch Convert epoch timestamps. @see yieldDataFrame()
        @returns pandas.DataFrame containing file data
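
        A typical end-to-end flow, sketched with hypothetical file names:

        ```.py
        vf = VarFile("Log000001.var")
        dat = DatFile("Log000001.dat")
        dat.addSourceMap(vf.getSourceMap())
        df = dat.asDataFrame(dropna=True)
        ```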
        """
        df = None
        for ndf in self.yieldDataFrame(dropna, resample, convertEpoch):
            count = len(ndf)
            if df is None:
                df = ndf
            else:
                df = pd.concat([df, ndf], copy=False)
            del ndf
            log.info(
                f"{count} steps processed ({pd.to_timedelta((df.index.max() - df.index.min()), unit='ms')})"
            )

        df.index.name = "Timestamp"
        return df


class StateFile:
    """! Represent a logger state file, caching information as necessary"""

    def __init__(self, filename):
        """!
        Create new object. File is not opened or parsed until requested.
        @param filename Path to state file to be read
        """
        ## File name (and path, if required) for this instance
        self._fn = filename

        self._sm = None

        ## Last known timestamp
        self._ts = None

        ## Channel mapping file / VarFile associated with this state file
        self._vf = None

        ## Source/Channel statistics
        self._stats = None

        ## State file modification time (set when parsed)
        self._mtime = None
    def parse(self):
        """!
        Read file and extract data.
        @returns Channel statistics (also stored in _stats)
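
        The layout implied by the parsing code below, with entirely
        hypothetical values:

        ```
        1700000000000
        Log000001.var
        0x10,0x03,42,1699999999000,'GPS position'
        ```

        i.e. a millisecond timestamp, the path to the associated .var file,
        then CSV rows of Source, Channel, Count, Time, and Value (quoted
        with ').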
        """
        with open(self._fn) as sf:
            try:
                # Safer, as ensures we get the mtime of the file we have open,
                # even if it's been replaced underneath us in the interim
                self._mtime = os.fstat(sf.fileno()).st_mtime
            except IOError:
                # But if we can't do that, try reading the file by name
                self._mtime = os.stat(self._fn).st_mtime

            self._ts = int(sf.readline())
            self._vf = VarFile(sf.readline().strip()).getSourceMap()
            cols = ["Source", "Channel", "Count", "Time", "Value"]
            self._stats = pd.read_csv(
                sf,
                header=None,
                names=cols,
                index_col=["Source", "Channel"],
                quotechar="'",
                converters={x: lambda z: int(z, base=0) for x in cols if x != "Value"},
            )
        self._stats["SecondsAgo"] = (self._stats["Time"] - self._ts) / 1000
        self._stats["DateTime"] = (
            self._stats["Time"]
            .apply(self.to_clocktime)
            .apply(lambda x: x.strftime("%Y-%m-%d %H:%M:%S"))
        )
        return self._stats

    def sources(self):
        """!
        Retrieve list of sources referenced in this state file, parsing file if necessary
        @returns Sorted set of source IDs
        """
        if self._stats is None:
            if self.parse() is None:
                return None

        return sorted(set(self._stats.index.get_level_values(0)))

    def channels(self, source):
        """!
        Extract all channels referenced in this state file for a specific source ID.

        Will parse the state file if required.

        @param source Source ID to extract
        @returns Sorted set of channel IDs
        """
        if self._stats is None:
            if self.parse() is None:
                return None
        return sorted(
            set(self._stats[(source, 0x00):(source, 0xFF)].index.get_level_values(1))
        )

    def last_source_message(self, source):
        """!
        Find most recent message received from a given source and convert to a
        clocktime value.

        Will parse the state file if required.

        @param source Source ID to check
        @returns clocktime, or None on error.
        """
        if self._stats is None:
            if self.parse() is None:
                return None

        try:
            times = self._stats.loc[(source, 0x00):(source, 0xFF)].Time
            return self.to_clocktime(times.max())
        except (KeyError, TypeError):
            return None

    def last_channel_message(self, source, channel):
        """!
        Find most recent message received from a channel (specified by source
        and channel IDs) and convert to a clocktime value.

        Will parse the state file if required.

        @param source Source ID to be checked
        @param channel Channel ID to be checked
        @returns clocktime, or None on error.
        """
        if self._stats is None:
            self.parse()
        try:
            return self.to_clocktime(self._stats.loc[(source, channel)].Time)
        except KeyError:
            return None

    def timestamp(self):
        """!
        Return latest timestamp from state file, parsing file if required.
        @returns Latest logger timestamp value (ms)
        """
        if self._ts is None:
            self.parse()
        return self._ts

    def to_clocktime(self, timestamp):
        """!
        Convert logger timestamp to real date/time

        Assumes that the most recent message was received at the file's
        modification time and uses this as an offset.

        @param timestamp Value to be converted
        @returns Pandas DateTime object
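
        A worked example with hypothetical numbers: if the file's mtime is
        epoch 1700003600.0 s and the latest logger timestamp is 3600000 ms,
        then delta = 1700003600.0 - 3600.0 = 1700000000.0, so a logger
        timestamp of 0 maps to epoch 1700000000.0 (the moment the logger's
        clock started counting).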
        """
        if timestamp is None:
            return None
        if self._ts is None or self._mtime is None:
            self.parse()
        delta = self._mtime - self._ts / 1000
        return pd.to_datetime(timestamp / 1000 + delta, unit="s")