Reference and API

This page covers the various classes and functions inside the pytritech package and is intended for Python programmers.

There are a number of modules, but typically one starts with the pytritech.glf module and its GLF class to load a file.
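
A minimal sketch of that typical starting point (the file path is a placeholder):

from pytritech.glf import GLF

# GLF is a context manager: the record catalogue inside the archive is only
# parsed on __enter__, so images and statuses are populated inside the block.
with GLF("path/to/file.glf") as glf:
    print(len(glf.images), "image records,", len(glf.statuses), "status records")
    first = glf.images[0]
    print("first ping at", first.header.time, "with dimensions", first.image_dim)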

ciheader

The CI Header used at the top of each record in the Tritech GLF file.

CIHeader

The Common Interface header. Our GLF dat file contains many of these, each introducing a block of sonar data.
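
As a rough sketch of how these headers sit in the data stream, the raw dat entry can be pulled straight out of the GLF zip and the first header decoded by hand; the GLF class documented below does this for every record (the path is a placeholder):

from zipfile import ZipFile
from pytritech.ciheader import CIHeader

# Pull the raw dat stream out of the GLF archive (a zip file).
with ZipFile("path/to/file.glf") as z:
    dat_name = next(n for n in z.namelist() if ".dat" in n)
    dat = z.read(dat_name)

# The dat stream begins with a CIHeader; decode it at offset 0.
header = CIHeader(dat, 0)
print(header.time, header.type, header.device_id, header.node_id)
print(header.payload_length, "payload bytes follow the", len(header), "byte header")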

Source code in src/pytritech/ciheader.py
class CIHeader:
    """The Common interface header. Our GLF Dat file
    contains lots of these that contain various sonar
    data."""

    header_size = 21

    def __init__(self, dat, ids):
        """
        Initialise our CIHeader object.

        Args:
            dat (bytes): A bytes-like object to read from.
            ids (int): The offset within dat to start reading from.
        """
        # First character should be an asterisk
        assert dat[ids : ids + 1].decode("utf8") == "*"
        # Ignored for now
        # version = int.from_bytes(dat[ids+1:ids+2], 'little')
        self.payload_length = (
            int.from_bytes(dat[ids + 2 : ids + 6], "little") - CIHeader.header_size
        )

        tts = struct.unpack("<d", dat[ids + 6 : ids + 14])[0]
        tseconds = int(tts)
        tmillis = int((tts - tseconds) * 1000)
        itime = EpochGem() + datetime.timedelta(seconds=tseconds, milliseconds=tmillis)

        # Conversion to UTC. We must subtract an hour, as the BST
        # handling around EpochGem *advances* the clock by one hour:
        # it assumes we are reading the date as UTC and converting
        # to BST (which adds the hour). What we actually want is to
        # read the time in as BST, then convert to UTC, taking that
        # hour back off.

        # In addition, ALL timedelta operations must be performed
        # BEFORE conversions between timezones as Python has an
        # annoying habit of dropping timezone information when one
        # performs operations on datetime.

        bst = pytz.timezone("Europe/London")
        itime = bst.localize(itime)
        self.time = itime.astimezone(pytz.utc)

        self.type = int.from_bytes(dat[ids + 14 : ids + 15], "little")
        self.device_id = int.from_bytes(dat[ids + 15 : ids + 17], "little")
        self.node_id = int.from_bytes(dat[ids + 17 : ids + 19], "little")
        # Ignored for now
        # spare = dat[ids+19:ids+21]

    def __len__(self):
        return CIHeader.header_size

    def __str__(self):
        return (
            str(self.payload_length)
            + ","
            + str(self.time)
            + ","
            + str(self.type)
            + ","
            + str(self.device_id)
            + ","
            + str(self.node_id)
            + ","
            + str(CIHeader.header_size)
        )

__init__(dat, ids)

Initialise our CIHeader object.

Parameters:
  • dat (bytes) –

    A bytes-like object to read from.

  • ids (int) –

    The offset within dat to start reading from.

Source code in src/pytritech/ciheader.py
def __init__(self, dat, ids):
    """
    Initialise our CIHeader object.

    Args:
        dat (bytes): A bytes-like object to read from.
        ids (int): The offset within dat to start reading from.
    """
    # First character should be an asterisk
    assert dat[ids : ids + 1].decode("utf8") == "*"
    # Ignored for now
    # version = int.from_bytes(dat[ids+1:ids+2], 'little')
    self.payload_length = (
        int.from_bytes(dat[ids + 2 : ids + 6], "little") - CIHeader.header_size
    )

    tts = struct.unpack("<d", dat[ids + 6 : ids + 14])[0]
    tseconds = int(tts)
    tmillis = int((tts - tseconds) * 1000)
    itime = EpochGem() + datetime.timedelta(seconds=tseconds, milliseconds=tmillis)

    # Conversion to UTC. We must subtract an hour, as the BST
    # handling around EpochGem *advances* the clock by one hour:
    # it assumes we are reading the date as UTC and converting
    # to BST (which adds the hour). What we actually want is to
    # read the time in as BST, then convert to UTC, taking that
    # hour back off.

    # In addition, ALL timedelta operations must be performed
    # BEFORE conversions between timezones as Python has an
    # annoying habit of dropping timezone information when one
    # performs operations on datetime.

    bst = pytz.timezone("Europe/London")
    itime = bst.localize(itime)
    self.time = itime.astimezone(pytz.utc)

    self.type = int.from_bytes(dat[ids + 14 : ids + 15], "little")
    self.device_id = int.from_bytes(dat[ids + 15 : ids + 17], "little")
    self.node_id = int.from_bytes(dat[ids + 17 : ids + 19], "little")

glf

Reading data from the Tritech Gemini files.

This module contains the following:
  • GLF - The GLF class representing a GLF file

>>> from pytritech.glf import GLF
>>> with GLF("path/to/file.glf") as glfobj:
...     record = glfobj.images[0]
...     bitmap, dims = glfobj.extract_image(record)

GLF

A class that represents the GLF file. We hold the various records in order, with their headers and values, but not the raw image/sonar data; instead we hold pointers into the GLF file where that data lives.
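
Because each record keeps its CIHeader, the catalogue can be sliced by sonar without touching the image data. A small sketch, assuming a recording containing more than one device (the path is a placeholder):

from collections import defaultdict
from pytritech.glf import GLF

with GLF("path/to/file.glf") as glf:
    # Group image records by the device id carried in their CIHeader.
    by_device = defaultdict(list)
    for rec in glf.images:
        by_device[rec.header.device_id].append(rec)

    print("sonar ids from the cfg:", glf.sonar_ids)
    for device_id, recs in sorted(by_device.items()):
        print(device_id, len(recs), "pings")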

Source code in src/pytritech/glf.py
class GLF:
    """A class that represents the GLF file. We hold the various records
    in order with headers and values, but not the raw image/sonar data.
    We hold pointers to these into the GLF file."""

    # TODO - potentially join multiple GLF files together?
    # TODO - potentially keep all files as zlib.IO pointers and keep them all open?

    def __init__(self, glfpath: str):
        """Initialise our glf object.

        Args:
            glfpath (str): full path and name of the glf file.
        """
        self.filepath = glfpath
        self._zobject = None
        self.config = None
        self.images = None
        self.dat = None # Keep the whole dat in memory if we can
        self.status = None
        self.statuses = []

    def _read_config(self):
        """Return the config file as an element tree xml parsed2."""
        for zname in self._zobject.namelist():
            # Look for the config file
            if ".cfg" in zname:
                with self._zobject.open(zname) as f:
                    cfg = f.read().decode("utf-8")
                    # Need to change a few tags as they are invalid
                    cfg = cfg.replace("<0>", "<zero>")
                    cfg = cfg.replace("</0>", "</zero>")
                    cfg = cfg.replace("<1>", "<one>")
                    cfg = cfg.replace("</1>", "</one>")

                    root = ET.fromstring(cfg)
                    self.config = root

        # Set some useful parameters on the GLF class such as sonar_id and range
        # TODO - it turns out that the range text doesn't always exist and there may be multiple ones
        self.sonar_ids = []

        for sonar_node in root.find("GuiCurrentSettings/devices"):
            self.sonar_ids.append(int(sonar_node.find("id").text))


    def __enter__(self):
        self._zobject = ZipFile(self.filepath, "r")

        for zname in self._zobject.namelist():
            if ".dat" in zname:
                self._f = self._zobject.open(zname)

        self._read_config()
        # We've read the config so we should be at the image_records
        self._parse_dat()

        return self

    def __exit__(self, *args):
        self._zobject.close()
        del self.dat


    def extract_image(self, image_rec: ImageRecord) -> Tuple[bytes, Tuple[int, int]]:
        """Return the data for the image, along with the dimensions -
        (bearing, range).

        Args:
            image_rec (ImageRecord): The record for which we want the bitmap

        Returns:
            Tuple: A tuple of bytes, and a tuple of (int, int) giving the (bearing, range) dimensions

        """
        image_data = None

        ptr = image_rec.image_data_ptr
        #self._f.seek(ptr)
        #image_data = self._f.read(image_rec.image_data_size)
        image_data = self.dat[ptr:ptr+image_rec.image_data_size]

        if image_rec.compression_type == 0:
            image_data = isal_zlib.decompress(image_data)
        elif image_rec.compression_type == 2:
            print("H264 decompression not yet implemented.")
            assert False

        return image_data, image_rec.image_dim

    def _parse_dat(self):
        """Read the dat file stored inside the glf which uses Zip."""
        self.images = []
        self.dat = self._f.read()
        file_offset = 0

        # We should get towards the last bytes, both of which
        # should be DEDE
        while file_offset < len(self.dat) - 2:
            header = CIHeader(self.dat, file_offset)
            file_offset += len(header)

            if header.type == 0:
                # image record
                image_rec = ImageRecord(header, self.dat, file_offset)
                self.images.append(image_rec)
                file_offset += len(image_rec)
            elif header.type == 1:
                # V4 protocol
                assert False
                break
            elif header.type == 2:
                # analog video
                assert False
                break
            elif header.type == 3:
                # Gemini Status
                status_rec = StatusRecord(header, self.dat, file_offset)
                self.statuses.append(status_rec)
                file_offset += len(status_rec)
            elif header.type == 98:
                # Raw Serial
                assert False
                break
            elif header.type == 99:
                # Generic
                assert False
                break

        assert (
            int.from_bytes(self.dat[file_offset : file_offset + 2], "little")
            == 0xDEDE
        )

__init__(glfpath)

Initialise our glf object.

Parameters:
  • glfpath (str) –

    full path and name of the glf file.

Source code in src/pytritech/glf.py
def __init__(self, glfpath: str):
    """Initialise our glf object.

    Args:
        glfpath (str): full path and name of the glf file.
    """
    self.filepath = glfpath
    self._zobject = None
    self.config = None
    self.images = None
    self.dat = None # Keep the whole dat in memory if we can
    self.status = None
    self.statuses = []

extract_image(image_rec)

Return the data for the image, along with the dimensions - (bearing, range).

Parameters:
  • image_rec (ImageRecord) –

    The record for which we want the bitmap

Returns:
  • Tuple( Tuple[bytes, Tuple[int, int]] ) –

    A tuple of bytes, and a tuple of (int, int) giving the (bearing, range) dimensions

Source code in src/pytritech/glf.py
def extract_image(self, image_rec: ImageRecord) -> Tuple[bytes, Tuple[int, int]]:
    """Return the data for the image, along with the dimensions -
    (bearing, range).

    Args:
        image_rec (ImageRecord): The record for which we want the bitmap

    Returns:
        Tuple: A tuple of bytes, and a tuple of (int, int) giving the (bearing, range) dimensions

    """
    image_data = None

    ptr = image_rec.image_data_ptr
    #self._f.seek(ptr)
    #image_data = self._f.read(image_rec.image_data_size)
    image_data = self.dat[ptr:ptr+image_rec.image_data_size]

    if image_rec.compression_type == 0:
        image_data = isal_zlib.decompress(image_data)
    elif image_rec.compression_type == 2:
        print("H264 decompression not yet implemented.")
        assert False

    return image_data, image_rec.image_dim
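
The returned bytes are the raw (decompressed) sample values. A sketch of turning them into an array, assuming numpy is installed, one byte per sample, and range-major ordering (the layout is an assumption, not something documented here):

import numpy as np
from pytritech.glf import GLF

with GLF("path/to/file.glf") as glf:
    rec = glf.images[0]
    data, (n_bearings, n_ranges) = glf.extract_image(rec)
    # Assumed: uint8 samples, one row per range line. Adjust if your data differs.
    img = np.frombuffer(data, dtype=np.uint8).reshape(n_ranges, n_bearings)
    print(img.shape, img.dtype)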

glftimes

Reading time data from the Tritech Gemini files.

This module contains the following:
  • glf_times - A faster way to read the GLF time ranges. This is handy if you have many files to read.

>>> from pytritech.glftimes import glf_times
>>> start, end = glf_times("path/to/file.glf")

glf_times(glf_path)

Given a path to a GLF file, extract the times from the CFG file contained within the uncompressed Zip.

Source code in src/pytritech/glftimes.py
def glf_times(glf_path: str):
    """Given a path to a GLF file, extract the times from the
    CFG file contained within the uncompressed Zip."""

    with open(glf_path, 'rb') as f:
        glf_file_size = os.path.getsize(glf_path)
        ds = 0

        while ds < glf_file_size:
            file_sig = int(struct.unpack("<I", f.read(4))[0])
            assert file_sig == 0x04034b50

            f.seek(14,1)
            #compressed_file_size = int(struct.unpack("<I", f.read(4))[0])
            _ = int(struct.unpack("<I", f.read(4))[0])
            uncompressed_file_size = int(struct.unpack("<I", f.read(4))[0])
            filename_len = int(struct.unpack("<H", f.read(2))[0])
            extra_field_length = int(struct.unpack("<H", f.read(2))[0])
            filename = str(f.read(filename_len))
            f.seek(extra_field_length, 1)
            f.seek(5, 1) # TODO - this really shouldn't be here but it works! 

            if ".cfg" in filename:
                # we've found the file we need, so read it and return the cfg.
                # We know that this file at any rate is uncompressed.
                cfg = f.read(uncompressed_file_size).decode("utf-8")

                # Need to change a few tags as they are invalid
                cfg = cfg.replace("<0>", "<zero>")
                cfg = cfg.replace("</0>", "</zero>")
                cfg = cfg.replace("<1>", "<one>")
                cfg = cfg.replace("</1>", "</one>")

                root = ET.fromstring(cfg)

                start_date_cfg = float(root.find("logHeader/creationTime").text)
                end_date_cfg = float(root.find("logTerminator/closeTime").text)

                bst = pytz.timezone("Europe/London")
                utc = pytz.timezone("UTC")

                tseconds = int(start_date_cfg)
                tmillis = int((start_date_cfg - tseconds) * 1000)
                start_date_cfg = bst.localize(EpochGem() + datetime.timedelta(seconds=tseconds, milliseconds=tmillis))
                start_date_cfg = start_date_cfg.astimezone(utc)

                tseconds = int(end_date_cfg)
                tmillis = int((end_date_cfg - tseconds) * 1000)
                end_date_cfg = bst.localize(EpochGem() + datetime.timedelta(seconds=tseconds, milliseconds=tmillis))
                end_date_cfg = end_date_cfg.astimezone(utc)

                return (start_date_cfg, end_date_cfg)

            else:
                f.seek(uncompressed_file_size, 1)

            ds += 30 + filename_len + extra_field_length + uncompressed_file_size

    return None
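
A sketch of the many-files case this function is aimed at, assuming a directory of .glf recordings (paths are placeholders):

from pathlib import Path
from pytritech.glftimes import glf_times

# Build a name -> (start, end) index without parsing any sonar data.
index = {}
for glf_path in sorted(Path("path/to/recordings").glob("*.glf")):
    result = glf_times(str(glf_path))
    if result is not None:  # None means no .cfg entry was found in the zip
        index[glf_path.name] = result

for name, (start, end) in sorted(index.items(), key=lambda kv: kv[1][0]):
    print(name, start, "->", end)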

image

The ImageRecord from the GLF File.

This module contains the following:
  • ImageRecord - the record of the actual reading from the sonar at a particular time.

ImageRecord

The main image record from the sonar. This starts with a record header structure, followed by a GMainImage structure (from the Tritech PDF). GMainImage also contains a GImage structure first.

This ImageRecord object does not contain the binary image data itself, but a pointer to where this data lives in the GLF file. The image can be retrieved using the GLF object and this ImageRecord object.
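
A short sketch of reading the per-ping metadata and then pulling the pixel data back through the parent GLF object (the path is a placeholder):

from pytritech.glf import GLF

with GLF("path/to/file.glf") as glf:
    for rec in glf.images[:5]:
        print(rec.db_tx_time, rec.image_dim, len(rec.bearing_table),
              "gain:", rec.percent_gain, "sos:", rec.sos_at_xd)

    # The record only points at the data; the GLF object retrieves it.
    data, dims = glf.extract_image(glf.images[0])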

Source code in src/pytritech/image.py
class ImageRecord:
    """The main image record from the sonar. This starts with a record
    header structure, followed by a GMainImage structure (from the Tritech
    PDF). GMainImage also contains a GImage structure first.

    This ImageRecord object does not contain the binary image data itself,
    but a pointer to where this data lives in the GLF file. The image can be
    retrieved using the GLF object and this ImageRecord object.

    """

    def __init__(self, ciheader, dat, ids):
        """Initialise our ImageRecord object.

        Args:
            ciheader (CIHeader): the CIHeader object that precedes this record in the GLF file.
            dat (bytes): A bytes-like object to read from.
            ids (int): The offset within dat to start reading from.
        """
        self.header = ciheader
        # Start with the Record Header
        ds = ids
        rtype = int.from_bytes(dat[ds : ds + 2], "little")
        assert rtype == 1  # All image records type should be 1
        version = int.from_bytes(dat[ds + 2 : ds + 4], "little")
        assert version == 0xEFEF  # All image records version should be 0xEFEF
        ds += 4

        # Now we are in the GImage Record
        # This is in a different order from what is printed in the datasheet.
        self.image_version = int.from_bytes(dat[ds : ds + 2], "little")
        self.range_start = int(struct.unpack("<I", dat[ds + 2 : ds + 6])[0])
        self.range_end = struct.unpack("<I", dat[ds + 6 : ds + 10])[0]
        self.range_compression = struct.unpack("<H", dat[ds + 10 : ds + 12])[0]
        self.bearing_start = struct.unpack("<I", dat[ds + 12 : ds + 16])[0]
        self.bearing_end = struct.unpack("<I", dat[ds + 16 : ds + 20])[0]
        ds += 20

        self.compression_type = (
            1  # Assume it's not compressed to begin with - TODO - enum!
        )

        # This is only in version 3?
        if self.image_version == 3:
            self.compression_type = struct.unpack("<H", dat[ds: ds + 2])[0]
            ds += 2

        # Actual image data is here.
        csize = int(struct.unpack("<I", dat[ds : ds + 4])[0])
        self.image_data_ptr = ds + 4
        self.image_data_size = csize
        ds += 4 + csize

        # TODO - when reading without zipfile, everything is fine up to here!
        # Continue with GMainImageRecord
        # TODO - We may split GMainImageRecord and GImage eventually
        btsize = self.bearing_end - self.bearing_start
        self.bearing_table = []

        for i in range(0, btsize):
            bearing = struct.unpack("<d", dat[ds + (i * 8) : ds + ((i + 1) * 8)])[0]
            self.bearing_table.append(bearing)

        ds += (btsize * 8)
        self.state_flags = dat[ds : ds + 4]
        self.modulation_freq = struct.unpack("<I", dat[ds + 4 : ds + 8])[0]
        ds += 8

        self.beam_form_app = struct.unpack("<f", dat[ds : ds + 4])[0]
        # This is only guaranteed in milliseconds, not microseconds
        # so do the rounding manually
        tts = struct.unpack("<d", dat[ds + 4 : ds + 12])[0]
        tseconds = int(tts)
        tmillis = int((tts - tseconds) * 1000)
        itime = EpochGem() + datetime.timedelta(seconds=tseconds, milliseconds=tmillis)
        bst = pytz.timezone("Europe/London")
        itime = bst.localize(itime)
        self.db_tx_time = itime.astimezone(pytz.utc)

        self.ping_flags = dat[ds + 12 : ds + 14]
        self.sos_at_xd = struct.unpack("<f", dat[ds + 14 : ds + 18])[0]
        self.percent_gain = struct.unpack("<h", dat[ds + 18 : ds + 20])[0]
        self.chirp = struct.unpack("?", dat[ds + 20 : ds + 21])[0]
        self.sonar_type = int.from_bytes(dat[ds + 21 : ds + 22], "little")
        self.platform = int.from_bytes(dat[ds + 22 : ds + 23], "little")

        #print("iRec", self.db_tx_time, self.chirp, self.sonar_type, self.platform, self.image_version, self.sos_at_xd, self.beam_form_app, self.modulation_freq)

        # For some reason there is an extra byte in here? Word padding?
        # Actually, there seems to be a whole lot more!
        endtag = int.from_bytes(dat[ds + 24 : ds + 26], "little")

        assert endtag == 0xDEDE
        ds += 26
        # Assumes the image_data field is still compressed or as it was
        # when the file was read. We uncompress so the 'real' size may
        # be larger, but record size is needed to advance along the
        # catalog so we don't adjust it here.
        self.record_size = ds - ids

        self.image_dim = (
            self.bearing_end - self.bearing_start,
            self.range_end - self.range_start,
        )

        # Check for compression
        if self.image_version != 3:
            exp_size = (self.bearing_end - self.bearing_start) * (
                self.range_end - self.range_start
            )
            if exp_size == self.image_data_size:
                print("Not compressed")
            else:
                # Zlib compression for lower image version
                self.compression_type = 0

    def __len__(self):
        return self.record_size

    def __str__(self):
        return (
            str(self.image_version)
            + ","
            + str(self.range_compression)
            + ","
            + str(self.compression_type)
            + ","
            + str(self.bearing_start)
            + ","
            + str(self.bearing_end)
            + ","
            + str(self.range_start)
            + ","
            + str(self.range_end)
            + ","
            + str(self.state_flags)
            + ","
            + str(self.image_data_size)
            + ","
            + str(self.modulation_freq)
            + ","
            + str(self.beam_form_app)
            + ","
            + str(self.db_tx_time)
            + ","
            + str(self.ping_flags)
            + ","
            + str(self.sos_at_xd)
            + ","
            + str(self.percent_gain)
            + ","
            + str(self.chirp)
            + ","
            + str(self.sonar_type)
            + ","
            + str(self.platform)
            + ","
            + str(self.record_size)
        )

__init__(ciheader, dat, ids)

Initialise our ImageRecord object.

Parameters:
  • ciheader (CIHeader) –

    the CIHeader object that precedes this record in the GLF file.

  • dat (bytes) –

    A bytes-like object to read from.

  • ids (int) –

    The offset within dat to start reading from.

Source code in src/pytritech/image.py
def __init__(self, ciheader, dat, ids):
    """Initialise our ImageRecord object.

    Args:
        ciheader (CIHeader): the CIHeader object that precedes this record in the GLF file.
        dat (bytes): A bytes-like object to read from.
        ids (int): The offset within dat to start reading from.
    """
    self.header = ciheader
    # Start with the Record Header
    ds = ids
    rtype = int.from_bytes(dat[ds : ds + 2], "little")
    assert rtype == 1  # All image records type should be 1
    version = int.from_bytes(dat[ds + 2 : ds + 4], "little")
    assert version == 0xEFEF  # All image records version should be 0xEFEF
    ds += 4

    # Now we are in the GImage Record
    # This is in a different order from what is printed in the datasheet.
    self.image_version = int.from_bytes(dat[ds : ds + 2], "little")
    self.range_start = int(struct.unpack("<I", dat[ds + 2 : ds + 6])[0])
    self.range_end = struct.unpack("<I", dat[ds + 6 : ds + 10])[0]
    self.range_compression = struct.unpack("<H", dat[ds + 10 : ds + 12])[0]
    self.bearing_start = struct.unpack("<I", dat[ds + 12 : ds + 16])[0]
    self.bearing_end = struct.unpack("<I", dat[ds + 16 : ds + 20])[0]
    ds += 20

    self.compression_type = (
        1  # Assume it's not compressed to begin with - TODO - enum!
    )

    # This is only in version 3?
    if self.image_version == 3:
        self.compression_type = struct.unpack("<H", dat[ds: ds + 2])[0]
        ds += 2

    # Actual image data is here.
    csize = int(struct.unpack("<I", dat[ds : ds + 4])[0])
    self.image_data_ptr = ds + 4
    self.image_data_size = csize
    ds += 4 + csize

    # TODO - when reading without zipfile, everything is fine up to here!
    # Continue with GMainImageRecord
    # TODO - We may split GMainImageRecord and GImage eventually
    btsize = self.bearing_end - self.bearing_start
    self.bearing_table = []

    for i in range(0, btsize):
        bearing = struct.unpack("<d", dat[ds + (i * 8) : ds + ((i + 1) * 8)])[0]
        self.bearing_table.append(bearing)

    ds += (btsize * 8)
    self.state_flags = dat[ds : ds + 4]
    self.modulation_freq = struct.unpack("<I", dat[ds + 4 : ds + 8])[0]
    ds += 8

    self.beam_form_app = struct.unpack("<f", dat[ds : ds + 4])[0]
    # This is only guaranteed in milliseconds, not microseconds
    # so do the rounding manually
    tts = struct.unpack("<d", dat[ds + 4 : ds + 12])[0]
    tseconds = int(tts)
    tmillis = int((tts - tseconds) * 1000)
    itime = EpochGem() + datetime.timedelta(seconds=tseconds, milliseconds=tmillis)
    bst = pytz.timezone("Europe/London")
    itime = bst.localize(itime)
    self.db_tx_time = itime.astimezone(pytz.utc)

    self.ping_flags = dat[ds + 12 : ds + 14]
    self.sos_at_xd = struct.unpack("<f", dat[ds + 14 : ds + 18])[0]
    self.percent_gain = struct.unpack("<h", dat[ds + 18 : ds + 20])[0]
    self.chirp = struct.unpack("?", dat[ds + 20 : ds + 21])[0]
    self.sonar_type = int.from_bytes(dat[ds + 21 : ds + 22], "little")
    self.platform = int.from_bytes(dat[ds + 22 : ds + 23], "little")

    #print("iRec", self.db_tx_time, self.chirp, self.sonar_type, self.platform, self.image_version, self.sos_at_xd, self.beam_form_app, self.modulation_freq)

    # For some reason there is an extra byte in here? Word padding?
    # Actually, there seems to be a whole lot more!
    endtag = int.from_bytes(dat[ds + 24 : ds + 26], "little")

    assert endtag == 0xDEDE
    ds += 26
    # Assumes the image_data field is still compressed or as it was
    # when the file was read. We uncompress so the 'real' size may
    # be larger, but record size is needed to advance along the
    # catalog so we don't adjust it here.
    self.record_size = ds - ids

    self.image_dim = (
        self.bearing_end - self.bearing_start,
        self.range_end - self.range_start,
    )

    # Check for compression
    if self.image_version != 3:
        exp_size = (self.bearing_end - self.bearing_start) * (
            self.range_end - self.range_start
        )
        if exp_size == self.image_data_size:
            print("Not compressed")
        else:
            # Zlib compression for lower image version
            self.compression_type = 0

status

The Status record from the GLF file.

This module contains the following:
  • StatusRecord - The StatusRecord class representing the status of the sonar at a particular time.

StatusRecord

The current status of the Sonar at this time.
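
The status records collected by GLF can, for example, be used to follow temperatures and link quality across a recording; a small sketch (the path is a placeholder):

from pytritech.glf import GLF

with GLF("path/to/file.glf") as glf:
    for status in glf.statuses:
        print(status.header.time,
              "die:", status.die_T, "psu:", status.psu_T,
              "link quality:", status.link_quality)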

Source code in src/pytritech/status.py
class StatusRecord:
    """The current status of the Sonar at this time."""

    def __init__(self, ciheader, dat, ids):
        """Initialise our StatusRecord object.

        Args:
            ciheader (CIHeader): the CIHeader object that precedes this record in the GLF file.
            dat (bytes): A bytes-like object to read from.
            ids (int): The offset within dat to start reading from.
        """
        self.header = ciheader
        ds = ids
        self.bf_version = struct.unpack("<H", dat[ds : ds + 2])[0]
        self.da_version = struct.unpack("<H", dat[ds + 2 : ds + 4])[0]
        self.flags = dat[ds + 4 : ds + 6]
        self.device_id = struct.unpack("<H", dat[ds + 6 : ds + 8])[0]
        self.xd_selected = dat[ds + 8 : ds + 9]
        ds += 10

        # one char reserved for future use
        self.vga_T1 = struct.unpack("<d", dat[ds : ds + 8])[0]
        self.vga_T2 = struct.unpack("<d", dat[ds + 8 : ds + 16])[0]
        self.vga_T3 = struct.unpack("<d", dat[ds + 16 : ds + 24])[0]
        self.vga_T4 = struct.unpack("<d", dat[ds + 24 : ds + 32])[0]
        ds += 32

        self.psu_T = struct.unpack("<d", dat[ds : ds + 8])[0]
        self.die_T = struct.unpack("<d", dat[ds + 8 : ds + 16])[0]
        self.tx_T = struct.unpack("<d", dat[ds + 16 : ds + 24])[0]
        ds += 24

        self.afe0_top_temp = struct.unpack("<d", dat[ds : ds + 8])[0]
        self.afe0_bot_temp = struct.unpack("<d", dat[ds + 8 : ds + 16])[0]
        self.afe1_top_temp = struct.unpack("<d", dat[ds + 16 : ds + 24])[0]
        self.afe1_bot_temp = struct.unpack("<d", dat[ds + 24 : ds + 32])[0]
        self.afe2_top_temp = struct.unpack("<d", dat[ds + 32 : ds + 40])[0]
        self.afe2_bot_temp = struct.unpack("<d", dat[ds + 40 : ds + 48])[0]
        self.afe3_top_temp = struct.unpack("<d", dat[ds + 48 : ds + 56])[0]
        self.afe3_bot_temp = struct.unpack("<d", dat[ds + 56 : ds + 64])[0]
        ds += 64

        self.link_type = dat[ds : ds + 2]
        self.uplink_speed = struct.unpack("<d", dat[ds + 2 : ds + 10])[0]
        self.downlink_speed = struct.unpack("<d", dat[ds + 10 : ds + 18])[0]
        self.link_quality = struct.unpack("<H", dat[ds + 18 : ds + 20])[0]
        self.packet_count = struct.unpack("<I", dat[ds + 20 : ds + 24])[0]
        self.recv_error_count = struct.unpack("<I", dat[ds + 24 : ds + 28])[0]
        self.resent_packet_count = struct.unpack("<I", dat[ds + 28 : ds + 32])[0]
        self.dropped_packet_count = struct.unpack("<I", dat[ds + 32 : ds + 36])[0]
        self.unknown_packet_count = struct.unpack("<I", dat[ds + 36 : ds + 40])[0]
        ds += 40

        self.lost_line_count = struct.unpack("<I", dat[ds : ds + 4])[0]
        self.general_count = struct.unpack("<I", dat[ds + 4 : ds + 8])[0]
        self.sonar_alt_ip = struct.unpack("<I", dat[ds + 8 : ds + 12])[0]
        self.surface_ip = struct.unpack("<I", dat[ds + 12 : ds + 16])[0]
        self.subnet_mask = dat[ds + 16 : ds + 20]
        self.mac_addr = dat[ds + 20 : ds + 26]

        # Two unsigned ints for internal usage
        ds += 26

        # TODO - uint64_t
        self.boot_sts_register = dat[ds : ds + 4]
        self.boot_sts_register_da = dat[ds + 4 : ds + 8]
        self.fpga_time = struct.unpack("<Q", dat[ds + 8 : ds + 16])[0]
        self.dip_switch = dat[ds + 16 : ds + 18]
        # Short for internal usage
        self.shutdown_status = dat[ds + 18 : ds + 20]
        self.net_adap_found = struct.unpack("?", dat[ds + 20 : ds + 21])[0]
        ds += 22

        # self.subsea_internal_temp = struct.unpack('<d', dat[ds:ds+8])[0]
        # self.subsea_cpu_temp = struct.unpack('<d', dat[ds+8:ds+16])[0]
        # self.ui_frame = struct.unpack('<I', dat[ds+16:ds+20])[0]
        # ds += 20

        self.record_size = ds - ids

    def __len__(self):
        return self.record_size

    def __str__(self):
        return (
            str(self.bf_version)
            + ","
            + str(self.da_version)
            + ","
            + str(self.flags)
            + ","
            + str(self.device_id)
            + ","
            + str(self.xd_selected)
            + ","
            + str(self.vga_T1)
            + ","
            + str(self.vga_T2)
            + ","
            + str(self.vga_T3)
            + ","
            + str(self.vga_T4)
            + ","
            + str(self.psu_T)
            + ","
            + str(self.die_T)
            + ","
            + str(self.tx_T)
            + ","
            + str(self.afe0_top_temp)
            + ","
            + str(self.afe0_bot_temp)
            + ","
            + str(self.afe1_top_temp)
            + ","
            + str(self.afe1_bot_temp)
            + ","
            + str(self.afe2_top_temp)
            + ","
            + str(self.afe2_bot_temp)
            + ","
            + str(self.afe3_top_temp)
            + ","
            + str(self.afe3_bot_temp)
            + ","
            + str(self.link_type)
            + ","
            + str(self.uplink_speed)
            + ","
            + str(self.downlink_speed)
            + ","
            + str(self.link_quality)
            + ","
            + str(self.packet_count)
            + ","
            + str(self.recv_error_count)
            + ","
            + str(self.resent_packet_count)
            + ","
            + str(self.dropped_packet_count)
            + ","
            + str(self.unknown_packet_count)
            + ","
            + str(self.lost_line_count)
            + ","
            + str(self.general_count)
            + ","
            + str(self.sonar_alt_ip)
            + ","
            + str(self.surface_ip)
            + ","
            + str(self.subnet_mask)
            + ","
            + str(self.mac_addr)
            + ","
            + str(self.boot_sts_register)
            + ","
            + str(self.boot_sts_register_da)
            + ","
            + str(self.fpga_time)
            + ","
            + str(self.dip_switch)
            + ","
            + str(self.shutdown_status)
            + ","
            + str(self.net_adap_found)
            + ","
            + str(self.record_size)
        )

__init__(ciheader, dat, ids)

Initialise our StatusRecord object.

Parameters:
  • ciheader (CIHeader) –

    the CIHeader object that precedes this record in the GLF file.

  • dat (bytes) –

    A bytes-like object to read from.

  • ids (int) –

    The offset within dat to start reading from.

Source code in src/pytritech/status.py
def __init__(self, ciheader, dat, ids):
    """Initialise our StatusRecord object.

    Args:
        ciheader (CIHeader): the CIHeader object that precedes this record in the GLF file.
        dat (bytes): A bytes-like object to read from.
        ids (int): The offset within dat to start reading from.
    """
    self.header = ciheader
    ds = ids
    self.bf_version = struct.unpack("<H", dat[ds : ds + 2])[0]
    self.da_version = struct.unpack("<H", dat[ds + 2 : ds + 4])[0]
    self.flags = dat[ds + 4 : ds + 6]
    self.device_id = struct.unpack("<H", dat[ds + 6 : ds + 8])[0]
    self.xd_selected = dat[ds + 8 : ds + 9]
    ds += 10

    # one char reserved for future use
    self.vga_T1 = struct.unpack("<d", dat[ds : ds + 8])[0]
    self.vga_T2 = struct.unpack("<d", dat[ds + 8 : ds + 16])[0]
    self.vga_T3 = struct.unpack("<d", dat[ds + 16 : ds + 24])[0]
    self.vga_T4 = struct.unpack("<d", dat[ds + 24 : ds + 32])[0]
    ds += 32

    self.psu_T = struct.unpack("<d", dat[ds : ds + 8])[0]
    self.die_T = struct.unpack("<d", dat[ds + 8 : ds + 16])[0]
    self.tx_T = struct.unpack("<d", dat[ds + 16 : ds + 24])[0]
    ds += 24

    self.afe0_top_temp = struct.unpack("<d", dat[ds : ds + 8])[0]
    self.afe0_bot_temp = struct.unpack("<d", dat[ds + 8 : ds + 16])[0]
    self.afe1_top_temp = struct.unpack("<d", dat[ds + 16 : ds + 24])[0]
    self.afe1_bot_temp = struct.unpack("<d", dat[ds + 24 : ds + 32])[0]
    self.afe2_top_temp = struct.unpack("<d", dat[ds + 32 : ds + 40])[0]
    self.afe2_bot_temp = struct.unpack("<d", dat[ds + 40 : ds + 48])[0]
    self.afe3_top_temp = struct.unpack("<d", dat[ds + 48 : ds + 56])[0]
    self.afe3_bot_temp = struct.unpack("<d", dat[ds + 56 : ds + 64])[0]
    ds += 64

    self.link_type = dat[ds : ds + 2]
    self.uplink_speed = struct.unpack("<d", dat[ds + 2 : ds + 10])[0]
    self.downlink_speed = struct.unpack("<d", dat[ds + 10 : ds + 18])[0]
    self.link_quality = struct.unpack("<H", dat[ds + 18 : ds + 20])[0]
    self.packet_count = struct.unpack("<I", dat[ds + 20 : ds + 24])[0]
    self.recv_error_count = struct.unpack("<I", dat[ds + 24 : ds + 28])[0]
    self.resent_packet_count = struct.unpack("<I", dat[ds + 28 : ds + 32])[0]
    self.dropped_packet_count = struct.unpack("<I", dat[ds + 32 : ds + 36])[0]
    self.unknown_packet_count = struct.unpack("<I", dat[ds + 36 : ds + 40])[0]
    ds += 40

    self.lost_line_count = struct.unpack("<I", dat[ds : ds + 4])[0]
    self.general_count = struct.unpack("<I", dat[ds + 4 : ds + 8])[0]
    self.sonar_alt_ip = struct.unpack("<I", dat[ds + 8 : ds + 12])[0]
    self.surface_ip = struct.unpack("<I", dat[ds + 12 : ds + 16])[0]
    self.subnet_mask = dat[ds + 16 : ds + 20]
    self.mac_addr = dat[ds + 20 : ds + 26]

    # Two unsigned ints for internal usage
    ds += 26

    # TODO - uint64_t
    self.boot_sts_register = dat[ds : ds + 4]
    self.boot_sts_register_da = dat[ds + 4 : ds + 8]
    self.fpga_time = struct.unpack("<Q", dat[ds + 8 : ds + 16])[0]
    self.dip_switch = dat[ds + 16 : ds + 18]
    # Short for internal usage
    self.shutdown_status = dat[ds + 18 : ds + 20]
    self.net_adap_found = struct.unpack("?", dat[ds + 20 : ds + 21])[0]
    ds += 22

    # self.subsea_internal_temp = struct.unpack('<d', dat[ds:ds+8])[0]
    # self.subsea_cpu_temp = struct.unpack('<d', dat[ds+8:ds+16])[0]
    # self.ui_frame = struct.unpack('<I', dat[ds+16:ds+20])[0]
    # ds += 20

    self.record_size = ds - ids