Diffstat
-rw-r--r--  tests/correlator/correlator.grc  | 49
-rwxr-xr-x  tests/correlator/correlator.py   |  5
-rw-r--r--  tests/correlator/epy_block_0.py  |  1
-rw-r--r--  tests/correlator/epy_block_1.py  |  3
-rw-r--r--  tests/correlator/epy_block_2.py  | 73
5 files changed, 106 insertions, 25 deletions
diff --git a/tests/correlator/correlator.grc b/tests/correlator/correlator.grc
index c545532..20076de 100644
--- a/tests/correlator/correlator.grc
+++ b/tests/correlator/correlator.grc
@@ -551,7 +551,8 @@ blocks:
- name: epy_block_1
id: epy_block
parameters:
- _source_code: "import numpy as np\nfrom gnuradio import gr\n\nnp.set_printoptions(formatter={'int':hex})\n\
+ _source_code: "import numpy as np\nfrom gnuradio import gr\n\n# remove print for\
+ \ now\nprint = lambda x: None\n\nnp.set_printoptions(formatter={'int':hex})\n\
\nclass blk(gr.sync_block):\n def __init__(self, vlen=1):\n dt = np.byte\
\ if vlen == 1 else (np.byte, vlen)\n\n gr.sync_block.__init__(\n \
\ self,\n name='Printer',\n in_sig=[(np.byte,\
@@ -563,7 +564,7 @@ blocks:
comment: ''
maxoutbuf: '0'
minoutbuf: '0'
- vlen: '20'
+ vlen: len(testvec)
states:
_io_cache: ('Printer', 'blk', [('vlen', '1')], [('0', 'byte', 1)], [], '', [])
bus_sink: false
@@ -575,24 +576,46 @@ blocks:
- name: epy_block_2
id: epy_block
parameters:
- _source_code: "import pmt\nimport numpy as np\nfrom gnuradio import gr\n\n\nclass\
- \ blk(gr.sync_block):\n\n def __init__(self, tag=\"frame_start\", vlen=1):\n\
- \ dt = np.byte if vlen == 1 else (np.byte, vlen)\n\n gr.sync_block.__init__(\n\
+ _source_code: "import pmt\nimport functools\n\nimport numpy as np\nfrom gnuradio\
+ \ import gr\n\n\nclass blk(gr.decim_block):\n\n def __init__(self, tag=\"\
+ frame_start\", vlen=1):\n decim = vlen\n\n gr.decim_block.__init__(\n\
\ self,\n name='Split at tag',\n in_sig=[np.byte],\n\
- \ out_sig=[(np.byte, vlen)]\n )\n\n self.tag = tag\n\
- \ self.vlen = vlen\n\n def work(self, input_items, output_items):\n\
- \ inp = input_items[0]\n\n is_frame_start = lambda tag: pmt.to_python(tag.key)\
- \ == self.tag\n tags = filter(is_frame_start, self.get_tags_in_window(0,\
- \ 0, len(inp)))\n\n counter = self.nitems_written(0)\n offsets\
- \ = map(lambda t: t.offset - counter, tags)\n\n print(list(offsets))\n\
- \n output_items[0][:] = inp.reshape(())\n return len(output_items[0])\n"
+ \ out_sig=[(np.byte, vlen)],\n decim = decim -1\n \
+ \ )\n\n if decim > 1:\n self.set_relative_rate(1. / (decim\
+ \ -1))\n\n self.tag = tag\n self.vlen = vlen\n\n def work(self,\
+ \ input_items, output_items):\n # nicer aliases\n inp = input_items[0]\n\
+ \ inplen = len(inp)\n oup = output_items[0]\n ouplen =\
+ \ len(oup)\n\n is_frame_start = lambda tag: pmt.to_python(tag.key) ==\
+ \ self.tag\n tags = list(filter(is_frame_start, self.get_tags_in_window(0,\
+ \ 0, inplen)))\n\n if len(tags) == 0:\n print(\"There are\
+ \ not tags!\")\n\n # get indices of where the frames are\n counter\
+ \ = self.nitems_written(0) # * self.vlen\n offsets = list(map(lambda\
+ \ t: t.offset - counter, tags))\n indices = list(zip(offsets, offsets[1:]))\n\
+ \n print(list(map(lambda t: t.offset, tags)))\n print(offsets)\n\
+ \ print(indices)\n\n # Get chunks\n def get_inp_chunk(index_pair):\
+ \ \n # read a chunk from the inp array\n # if there are\
+ \ not enough values the rest is padded with zeros,\n # if there are\
+ \ too many values, they get cut off\n start = index_pair[0]\n \
+ \ end = index_pair[1]\n length = end - start\n\n \
+ \ assert start != end\n\n print(f\"getting chunk from {start}\
+ \ to {end} of length {length}\")\n\n # not enough values\n \
+ \ if length < self.vlen:\n pad = self.vlen - length\n \
+ \ print(f\"Chunk was too short! Adding {pad} zeros to pad\")\n\
+ \ return np.concatenate([inp[start:end], np.zeros(pad)])\n\n\
+ \ # too many values\n if length > self.vlen:\n \
+ \ print(f\"Chunk was too long! Cutting off {length - self.vlen} values\"\
+ )\n end = start + self.vlen\n return inp[start:end]\n\
+ \n # okay\n rv = inp[start:end]\n print(rv)\n\
+ \ return rv\n\n chunks = list(map(get_inp_chunk, indices))\n\
+ \n assert len(chunks) != 0\n\n print(chunks)\n oup[:] =\
+ \ np.concatenate(chunks).reshape((-1, self.vlen))\n\n return len(oup)\n"
affinity: ''
alias: ''
comment: ''
maxoutbuf: '0'
minoutbuf: '0'
tag: '''frame_start'''
- vlen: '20'
+ vlen: len(testvec)
states:
_io_cache: ('Split at tag', 'blk', [('tag', "'frame_start'"), ('vlen', '1')],
[('0', 'byte', 1)], [('0', 'byte', 1)], '', ['tag', 'vlen'])
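
A note on what the new embedded 'Split at tag' source above actually computes: the absolute offsets of frame_start tags are turned into window-relative indices, and consecutive offsets are paired into chunk boundaries. A minimal standalone sketch of that index arithmetic (plain Python, no GNU Radio; all numbers are made up):

# Hypothetical values: absolute tag offsets reported by the scheduler and the
# item count the block uses as its reference (nitems_written(0) in the code above).
tag_offsets = [1000, 1020, 1041, 1060]
counter = 1000

# Window-relative frame starts, as in: t.offset - counter
offsets = [o - counter for o in tag_offsets]        # [0, 20, 41, 60]

# Consecutive offsets become (start, end) chunk boundaries, as in
# zip(offsets, offsets[1:]); the last offset has no successor yet, so its
# frame is only emitted once more input (and the next tag) arrives.
indices = list(zip(offsets, offsets[1:]))
print(indices)                                      # [(0, 20), (20, 41), (41, 60)]
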
diff --git a/tests/correlator/correlator.py b/tests/correlator/correlator.py
index c84e4d7..173c14f 100755
--- a/tests/correlator/correlator.py
+++ b/tests/correlator/correlator.py
@@ -330,8 +330,8 @@ class correlator(gr.top_block, Qt.QWidget):
             self.top_grid_layout.setRowStretch(r, 1)
         for c in range(1, 2):
             self.top_grid_layout.setColumnStretch(c, 1)
-        self.epy_block_2 = epy_block_2.blk(tag='frame_start', vlen=20)
-        self.epy_block_1 = epy_block_1.blk(vlen=20)
+        self.epy_block_2 = epy_block_2.blk(tag='frame_start', vlen=len(testvec))
+        self.epy_block_1 = epy_block_1.blk(vlen=len(testvec))
         self.epy_block_0 = epy_block_0.blk()
         self.digital_pfb_clock_sync_xxx_0 = digital.pfb_clock_sync_ccf(sps, timing_loop_bw, rrc_taps, nfilts, 16, 1.5, 1)
         self.digital_costas_loop_cc_0 = digital.costas_loop_cc(2 * 3.141592653589793 / 100, 4, False)
@@ -431,6 +431,7 @@ class correlator(gr.top_block, Qt.QWidget):
     def set_testvec(self, testvec):
         self.testvec = testvec
         self.blocks_vector_source_x_0.set_data(self.testvec * 500, [])
+        self.epy_block_2.vlen = len(self.testvec)
 
     def get_samp_rate(self):
         return self.samp_rate
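
The regenerated set_testvec above now forwards the test-vector length to the splitter block at runtime. One caveat worth keeping in mind (an observation, not part of the diff): a GNU Radio block's I/O signature is fixed when the block is constructed, so reassigning vlen afterwards only affects code in work() that reads self.vlen. A toy sketch of the setter pattern, using a hypothetical stand-in class rather than the generated block:

class SplitAtTag:                        # hypothetical stand-in for epy_block_2.blk
    def __init__(self, tag="frame_start", vlen=1):
        self.tag = tag
        self.vlen = vlen                 # read by work(); the out_sig would already
                                         # be fixed at this point

testvec = [0x1f, 0x35] * 10              # hypothetical 20-item test vector
splitter = SplitAtTag(vlen=len(testvec))

def set_testvec(new_vec):
    # mirrors the generated setter: keep the splitter's chunk length in sync
    splitter.vlen = len(new_vec)

set_testvec(testvec + [0x00])
print(splitter.vlen)                     # 21
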
diff --git a/tests/correlator/epy_block_0.py b/tests/correlator/epy_block_0.py
index e32d72b..a0b4cd9 100644
--- a/tests/correlator/epy_block_0.py
+++ b/tests/correlator/epy_block_0.py
@@ -3,6 +3,7 @@ import pmt
 import numpy as np
 from gnuradio import gr
 
+# hide debugging print statements for the moment
 print = lambda x: None
 
 class blk(gr.sync_block):
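
The print = lambda x: None line that the new comment refers to silences the block's debug output by shadowing the module-level print. As an aside: a single-argument lambda breaks any call that passes several arguments or keywords; a variadic no-op avoids that. A small sketch:

# Module-level shadow that swallows any print() call, regardless of how many
# positional or keyword arguments it receives.
print = lambda *args, **kwargs: None

print("tags:", [0x1f, 0x35], sep=" ")   # silently ignored
print()                                 # also fine, unlike a one-argument lambda
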
diff --git a/tests/correlator/epy_block_1.py b/tests/correlator/epy_block_1.py
index 7e722fc..8ace6e4 100644
--- a/tests/correlator/epy_block_1.py
+++ b/tests/correlator/epy_block_1.py
@@ -1,6 +1,9 @@
 import numpy as np
 from gnuradio import gr
 
+# remove print for now
+print = lambda x: None
+
 np.set_printoptions(formatter={'int':hex})
 
 class blk(gr.sync_block):
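
epy_block_1 (the Printer block) keeps its np.set_printoptions(formatter={'int': hex}) call, which routes every integer element through the built-in hex() when arrays are printed. A standalone illustration with arbitrary example bytes:

import numpy as np

# Render integer arrays in hex, as the Printer block configures globally.
np.set_printoptions(formatter={'int': hex})

frame = np.array([31, 53, 41, 87], dtype=np.byte)   # arbitrary example values
print(frame)                                        # [0x1f 0x35 0x29 0x57]
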
diff --git a/tests/correlator/epy_block_2.py b/tests/correlator/epy_block_2.py
index 210be12..0ca2cbb 100644
--- a/tests/correlator/epy_block_2.py
+++ b/tests/correlator/epy_block_2.py
@@ -1,33 +1,86 @@
 import pmt
+import functools
+
 import numpy as np
 from gnuradio import gr
 
 
-class blk(gr.sync_block):
+class blk(gr.decim_block):
 
     def __init__(self, tag="frame_start", vlen=1):
-        dt = np.byte if vlen == 1 else (np.byte, vlen)
+        decim = vlen
 
-        gr.sync_block.__init__(
+        gr.decim_block.__init__(
             self,
             name='Split at tag',
             in_sig=[np.byte],
-            out_sig=[(np.byte, vlen)]
+            out_sig=[(np.byte, vlen)],
+            decim = decim -1
         )
 
+        if decim > 1:
+            self.set_relative_rate(1. / (decim -1))
+
         self.tag = tag
         self.vlen = vlen
 
     def work(self, input_items, output_items):
+        # nicer aliases
         inp = input_items[0]
+        inplen = len(inp)
+        oup = output_items[0]
+        ouplen = len(oup)
 
         is_frame_start = lambda tag: pmt.to_python(tag.key) == self.tag
-        tags = filter(is_frame_start, self.get_tags_in_window(0, 0, len(inp)))
+        tags = list(filter(is_frame_start, self.get_tags_in_window(0, 0, inplen)))
+
+        if len(tags) == 0:
+            print("There are not tags!")
+
+        # get indices of where the frames are
+        counter = self.nitems_written(0) # * self.vlen
+        offsets = list(map(lambda t: t.offset - counter, tags))
+        indices = list(zip(offsets, offsets[1:]))
+
+        print(list(map(lambda t: t.offset, tags)))
+        print(offsets)
+        print(indices)
+
+        # Get chunks
+        def get_inp_chunk(index_pair):
+            # read a chunk from the inp array
+            # if there are not enough values the rest is padded with zeros,
+            # if there are too many values, they get cut off
+            start = index_pair[0]
+            end = index_pair[1]
+            length = end - start
+
+            assert start != end
+
+            print(f"getting chunk from {start} to {end} of length {length}")
+
+            # not enough values
+            if length < self.vlen:
+                pad = self.vlen - length
+                print(f"Chunk was too short! Adding {pad} zeros to pad")
+                return np.concatenate([inp[start:end], np.zeros(pad)])
+
+            # too many values
+            if length > self.vlen:
+                print(f"Chunk was too long! Cutting off {length - self.vlen} values")
+                end = start + self.vlen
+                return inp[start:end]
+
+            # okay
+            rv = inp[start:end]
+            print(rv)
+            return rv
+
+        chunks = list(map(get_inp_chunk, indices))
 
-        counter = self.nitems_written(0)
-        offsets = map(lambda t: t.offset - counter, tags)
+        assert len(chunks) != 0
 
-        print(list(offsets))
+        print(chunks)
+        oup[:] = np.concatenate(chunks).reshape((-1, self.vlen))
 
-        output_items[0][:] = inp
-        return len(output_items[0])
+        return len(oup)
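
The heart of the rewritten work() is get_inp_chunk: each (start, end) pair is cut out of the input, zero-padded when it is shorter than vlen and truncated when it is longer, and the chunks are then stacked into (-1, vlen) output vectors. A standalone numpy sketch of that reshaping (an illustration, not the block itself; it passes dtype=inp.dtype to np.zeros to keep the byte type, whereas the block's plain np.zeros(pad) yields float64 and relies on the final assignment into the output buffer to cast back):

import numpy as np

def take_chunk(inp, start, end, vlen):
    # Cut inp[start:end], zero-padding or truncating to exactly vlen items.
    length = end - start
    if length < vlen:                                   # too short: pad with zeros
        return np.concatenate([inp[start:end],
                               np.zeros(vlen - length, dtype=inp.dtype)])
    return inp[start:start + vlen]                      # exact or too long: cut off

# Made-up stream: the first frame is full, the next two are one sample short.
inp = np.arange(10, dtype=np.byte)
indices = [(0, 4), (4, 7), (7, 10)]
vlen = 4

chunks = [take_chunk(inp, s, e, vlen) for s, e in indices]
out = np.concatenate(chunks).reshape((-1, vlen))
print(out)
# [[0 1 2 3]
#  [4 5 6 0]
#  [7 8 9 0]]
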