source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c87e182de0887fd5361989c677c4e8f5000cd9491d6d563161a8f3a5519fc7f"
-[[package]]
-name = "demod"
-version = "0.1.0"
-dependencies = [
- "cpal",
- "env_logger",
- "log",
- "m17app",
- "m17codec2",
- "m17core",
-]
-
[[package]]
name = "either"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
+[[package]]
+name = "hound"
+version = "3.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62adaabb884c94955b19907d60019f4e145d091c75345379e70d1ee696f7854f"
+
[[package]]
name = "humantime"
version = "2.1.0"
dependencies = [
"codec2",
"cpal",
+ "hound",
 "log",
 "m17app",
 "m17core",
]
+[[package]]
+name = "m17rt-demod"
+version = "0.1.0"
+dependencies = [
+ "cpal",
+ "env_logger",
+ "log",
+ "m17app",
+ "m17codec2",
+ "m17core",
+]
+
+[[package]]
+name = "m17rt-mod"
+version = "0.1.0"
+dependencies = [
+ "env_logger",
+ "log",
+ "m17app",
+ "m17codec2",
+ "m17core",
+]
+
[[package]]
name = "mach2"
version = "0.4.2"
[workspace]
resolver = "2"
-members = [ "demod",
- "m17app", "m17codec2", "m17core",
+members = [
+ "m17app", "m17codec2", "m17core", "tools/m17rt-demod", "tools/m17rt-mod",
]
+++ /dev/null
-[package]
-name = "demod"
-version = "0.1.0"
-edition = "2021"
-license = "MIT"
-authors = ["Thomas Karpiniec <tom.karpiniec@outlook.com>"]
-publish = false
-
-[dependencies]
-m17core = { path = "../m17core" }
-m17app = { path = "../m17app" }
-m17codec2 = { path = "../m17codec2" }
-
-cpal = "0.15.3"
-env_logger = "0.11.6"
-log = "0.4.22"
+++ /dev/null
-use m17app::app::M17App;
-use m17app::soundmodem::{InputRrcFile, InputSoundcard, NullOutputSink, Soundmodem};
-use m17codec2::Codec2Adapter;
-use std::path::PathBuf;
-
-pub fn m17app_test() {
- let path = PathBuf::from("../../Data/test_vk7xt.rrc");
- let source = InputRrcFile::new(path);
- //let source = InputSoundcard::new();
- let soundmodem = Soundmodem::new_with_input_and_output(source, NullOutputSink::new());
- let app = M17App::new(soundmodem);
- app.add_stream_adapter(Codec2Adapter::new());
- app.start();
- std::thread::sleep(std::time::Duration::from_secs(15));
-}
-
-fn main() {
- env_logger::init();
- m17app_test();
-}
use crate::adapter::{PacketAdapter, StreamAdapter};
use crate::tnc::Tnc;
use m17core::kiss::{KissBuffer, KissCommand, KissFrame};
-use m17core::protocol::{EncryptionType, LsfFrame, PacketType};
+use m17core::protocol::{EncryptionType, LsfFrame, PacketType, StreamFrame};
use log::debug;
use std::collections::HashMap;
// add more methods here for stream outgoing
- pub fn transmit_stream_start(&self /* lsf?, payload? what needs to be configured ?! */) {}
+ pub fn transmit_stream_start(&self, lsf: LsfFrame) {
+ // TODO: is asking for an LsfFrame a good idea or unfriendly API?
+ // What I should do here is create a LinkSetup struct which wraps an LsfFrame and can be loaded with a raw one
+ let kiss_frame = KissFrame::new_stream_setup(&lsf.0).unwrap();
+ let _ = self.event_tx.send(TncControlEvent::Kiss(kiss_frame));
+ }
// As long as there is only one TNC, it is implied that only one stream transmission is ever in flight at a time.
- pub fn transmit_stream_next(&self, /* next payload, */ end_of_stream: bool) {}
+ pub fn transmit_stream_next(&self, stream: StreamFrame) {
+ let kiss_frame = KissFrame::new_stream_data(&stream).unwrap();
+ let _ = self.event_tx.send(TncControlEvent::Kiss(kiss_frame));
+ }
}
/// Synchronised structure for listeners subscribing to packets and streams.
.build_input_stream(
&config.into(),
move |data: &[i16], _info: &cpal::InputCallbackInfo| {
- debug!("input has given us {} samples", data.len());
let out: Vec<i16> = data.iter().map(|s| *s).collect();
let _ = samples.try_send(SoundmodemEvent::BasebandInput(out.into()));
},
}
}
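+/// Input source that produces a continuous stream of silence at the soundmodem's
+/// assumed 48 kHz rate, for exercising the transmit path only (see the m17rt-mod
+/// tool added below).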
+pub struct NullInputSource {
+ end_tx: Mutex<Option<Sender<()>>>,
+}
+
+impl NullInputSource {
+ pub fn new() -> Self {
+ Self {
+ end_tx: Mutex::new(None),
+ }
+ }
+}
+
+impl InputSource for NullInputSource {
+ fn start(&self, samples: SyncSender<SoundmodemEvent>) {
+ let (end_tx, end_rx) = channel();
+ std::thread::spawn(move || {
+ // assuming 48 kHz for now
+ const TICK: Duration = Duration::from_millis(25);
+ const SAMPLES_PER_TICK: usize = 1200;
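+            // 48,000 samples/s * 0.025 s per tick = 1,200 samples of silence per pass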
+ let mut next_tick = Instant::now() + TICK;
+
+ loop {
+ std::thread::sleep(next_tick.duration_since(Instant::now()));
+ next_tick = next_tick + TICK;
+ if end_rx.try_recv() != Err(TryRecvError::Empty) {
+ break;
+ }
+ if let Err(e) = samples.try_send(SoundmodemEvent::BasebandInput(
+ [0i16; SAMPLES_PER_TICK].into(),
+ )) {
+ debug!("overflow feeding soundmodem: {e:?}");
+ }
+ }
+ });
+ *self.end_tx.lock().unwrap() = Some(end_tx);
+ }
+
+ fn close(&self) {
+ let _ = self.end_tx.lock().unwrap().take();
+ }
+}
+
pub struct OutputBuffer {
idling: bool,
// TODO: something more efficient
if end_rx.try_recv() != Err(TryRecvError::Empty) {
break;
}
+ // For now only write deliberately modulated (non-idling) samples
+ // Multiple transmissions will get smooshed together
+ let mut buf_used = 0;
let mut buffer = buffer.write().unwrap();
for out in buf.chunks_mut(2) {
if let Some(s) = buffer.samples.pop_front() {
- let be = s.to_be_bytes();
+ let be = s.to_le_bytes();
out.copy_from_slice(&[be[0], be[1]]);
- } else if buffer.idling {
- out.copy_from_slice(&[0, 0]);
- } else {
+ buf_used += 2;
+ } else if !buffer.idling {
debug!("output rrc file had underrun");
let _ = event_tx.send(SoundmodemEvent::OutputUnderrun);
break;
}
}
- if let Err(e) = file.write_all(&buf) {
+ if let Err(e) = file.write_all(&buf[0..buf_used]) {
debug!("failed to write to rrc file: {e:?}");
break;
}
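+            // buf_used counts bytes; the event below reports samples, hence the division by two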
+ let _ = event_tx.send(SoundmodemEvent::DidReadFromOutputBuffer {
+ len: buf_used / 2,
+ timestamp: Instant::now(),
+ });
}
});
*self.end_tx.lock().unwrap() = Some(end_tx);
}
let mut buffer = buffer.write().unwrap();
+ let mut taken = 0;
for _ in 0..SAMPLES_PER_TICK {
- if !buffer.samples.pop_front().is_some() && !buffer.idling {
- debug!("null output had underrun");
- let _ = event_tx.send(SoundmodemEvent::OutputUnderrun);
- break;
+                if buffer.samples.pop_front().is_none() {
+ if !buffer.idling {
+ debug!("null output had underrun");
+ let _ = event_tx.send(SoundmodemEvent::OutputUnderrun);
+ break;
+ }
+ } else {
+ taken += 1;
}
}
+ let _ = event_tx.send(SoundmodemEvent::DidReadFromOutputBuffer {
+ len: taken,
+ timestamp: Instant::now(),
+ });
}
});
*self.end_tx.lock().unwrap() = Some(end_tx);
[dependencies]
codec2 = "0.3.0"
cpal = "0.15.3"
+hound = "3.5.1"
m17core = { path = "../m17core" }
m17app = { path = "../m17app" }
log = "0.4.22"
+
use log::debug;
use m17app::adapter::StreamAdapter;
use m17app::app::TxHandle;
+use m17core::address::Address;
+use m17core::address::Callsign;
use m17core::protocol::LsfFrame;
+use m17core::protocol::StreamFrame;
use std::collections::VecDeque;
use std::fs::File;
use std::io::Write;
use std::path::Path;
+use std::path::PathBuf;
use std::sync::{
mpsc::{channel, Receiver, Sender},
Arc, Mutex,
};
+use std::time::Duration;
+use std::time::Instant;
pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
let codec2 = Codec2::new(Codec2Mode::MODE_3200);
fn tnc_closed(&self) {}
- fn stream_began(&self, lsf: LsfFrame) {
+ fn stream_began(&self, _lsf: LsfFrame) {
// for now we will assume:
// - unencrypted
// - data type is Voice (Codec2 3200), not Voice+Data
self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
}
- fn stream_data(&self, frame_number: u16, is_final: bool, data: Arc<[u8; 16]>) {
+ fn stream_data(&self, _frame_number: u16, _is_final: bool, data: Arc<[u8; 16]>) {
let mut state = self.state.lock().unwrap();
for encoded in data.chunks(8) {
if state.out_buf.len() < 1024 {
fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
let mut state = state.lock().unwrap();
- debug!(
- "sound card wants {} samples, we have {} in the buffer",
- data.len(),
- state.out_buf.len()
- );
for d in data {
*d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
}
.build_output_stream(
&config.into(),
move |data: &mut [i16], info: &cpal::OutputCallbackInfo| {
- debug!(
- "callback {:?} playback {:?}",
- info.timestamp().callback,
- info.timestamp().playback
- );
output_cb(data, &state);
},
|e| {
let _ = end.recv();
// it seems concrete impls of Stream have a Drop implementation that will handle termination
}
+
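+/// Plays a 16-bit mono WAV file (expected to be 8 kHz, to match Codec2 3200) as an
+/// M17 voice stream via the supplied TxHandle.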
+pub struct WavePlayer;
+
+impl WavePlayer {
+ pub fn play(path: PathBuf, tx: TxHandle) {
+ let mut reader = hound::WavReader::open(path).unwrap();
+ let mut samples = reader.samples::<i16>();
+
+ let mut codec = Codec2::new(Codec2Mode::MODE_3200);
+ let mut in_buf = [0i16; 160];
+ let mut out_buf = [0u8; 16];
+ let mut lsf_chunk: usize = 0;
+ const TICK: Duration = Duration::from_millis(40);
+ let mut next_tick = Instant::now() + TICK;
+ let mut frame_number = 0;
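+        // Each pass of the loop below builds one 40 ms stream frame: two 20 ms
+        // Codec2 blocks of 160 samples each, packed into 16 payload bytes.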
+
+        // TODO: need a better way to create addresses from std strings (see the sketch after this impl)
+
+ let lsf = LsfFrame::new_voice(
+            &Address::Callsign(Callsign(b"VK7XT    ".clone())),
+ &Address::Broadcast,
+ );
+
+ tx.transmit_stream_start(lsf.clone());
+
+ loop {
+ let mut last_one = false;
+ for mut out in out_buf.chunks_mut(8) {
+ for i in 0..160 {
+ let sample = match samples.next() {
+ Some(Ok(sample)) => sample,
+ _ => {
+ last_one = true;
+ 0
+ }
+ };
+ in_buf[i] = sample;
+ }
+ codec.encode(&mut out, &in_buf);
+ }
+ tx.transmit_stream_next(StreamFrame {
+ lich_idx: lsf_chunk as u8,
+ lich_part: lsf.0[lsf_chunk * 5..(lsf_chunk + 1) * 5]
+ .try_into()
+ .unwrap(),
+ frame_number,
+ end_of_stream: last_one,
+ stream_data: out_buf.clone(),
+ });
+ frame_number += 1;
+ lsf_chunk = (lsf_chunk + 1) % 6;
+
+ if last_one {
+ break;
+ }
+
+ std::thread::sleep(next_tick.duration_since(Instant::now()));
+ next_tick += TICK;
+ }
+ }
+}
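+
+// Hedged sketch for the address TODO above: assuming Callsign wraps a 9-byte,
+// space-padded, upper-case buffer (as the literal in WavePlayer::play suggests),
+// a string-based constructor could look roughly like this. Illustrative only;
+// it is not part of the m17core API.
+fn _callsign_from_str(s: &str) -> Callsign {
+    let mut buf = [b' '; 9];
+    for (i, b) in s.bytes().take(9).enumerate() {
+        buf[i] = b.to_ascii_uppercase();
+    }
+    Callsign(buf)
+}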
if gain < SYNC_MIN_GAIN {
return (f32::MAX, gain, shift);
}
+
let mut diff = 0.0;
for i in 0..8 {
let sym_diff = (((samples[i * 10] - shift) / gain) - target[i] as f32).abs();
}
pub struct SoftModulator {
+ // TODO: 2000 was overflowing around EOT, track down why
/// Next modulated frame to output - 1920 samples for 40ms frame plus 80 for ramp-down
- next_transmission: [i16; 2000],
+ next_transmission: [i16; 4000],
/// How much of next_transmission should in fact be transmitted
next_len: usize,
/// How much of next_transmission has been read out
impl SoftModulator {
pub fn new() -> Self {
Self {
- next_transmission: [0i16; 2000],
+ next_transmission: [0i16; 4000],
next_len: 0,
next_read: 0,
tx_delay_padding: 0,
}
fn push_sample(&mut self, dibit: f32) {
- // Right now we are encoding everything as 1.0-scaled dibit floats
- // This is a bit silly but it will do for a minute
- // Max theoretical gain from the RRC filter is 4.328
- // Let's bump everything to a baseline of 16383 / 4.328 = 3785.35
- // This is not particularly high but at least we won't ever hit the top
- self.filter_win[self.filter_cursor] = dibit * 3785.0;
- self.filter_cursor = (self.filter_cursor + 1) % 81;
- let mut out: f32 = 0.0;
- for i in 0..81 {
- let filter_idx = (self.filter_cursor + i) % 81;
- out += RRC_48K[i] * self.filter_win[filter_idx];
+ // TODO: 48 kHz assumption again
+ for i in 0..10 {
+ // Right now we are encoding everything as 1.0-scaled dibit floats
+ // This is a bit silly but it will do for a minute
+ // Max theoretical gain from the RRC filter is 4.328
+ // Let's bump everything to a baseline of 16383 / 4.328 = 3785.35
+ // This is not particularly high but at least we won't ever hit the top
+ if i == 0 {
+ // 10x the impulse with zeroes between for upsampling
+ self.filter_win[self.filter_cursor] = dibit * 3785.0 * 10.0;
+ } else {
+ self.filter_win[self.filter_cursor] = 0.0;
+ }
+ self.filter_cursor = (self.filter_cursor + 1) % 81;
+ let mut out: f32 = 0.0;
+ for i in 0..81 {
+ let filter_idx = (self.filter_cursor + i) % 81;
+ out += RRC_48K[i] * self.filter_win[filter_idx];
+ }
+ self.next_transmission[self.next_len] = out as i16;
+ self.next_len += 1;
}
- self.next_transmission[self.next_len] = out as i16;
- self.next_len += 1;
}
fn request_frame_if_space(&mut self) {
capacity: usize,
output_latency: usize,
) {
+ //log::debug!("modulator update_output_buffer {samples_to_play} {capacity} {output_latency}");
self.output_latency = output_latency;
self.buf_capacity = capacity;
self.samples_in_buf = samples_to_play;
// then follow it with whatever might be left in next_transmission
let next_remaining = self.next_len - self.next_read;
if next_remaining > 0 {
- let len = (out.len() - written).max(next_remaining);
+ let len = (out.len() - written).min(next_remaining);
out[written..(written + len)]
.copy_from_slice(&self.next_transmission[self.next_read..(self.next_read + len)]);
self.next_read += len;
pub struct LsfFrame(pub [u8; 30]);
impl LsfFrame {
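+    /// Construct a link setup frame for an unencrypted voice stream (Codec2 3200).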
+ pub fn new_voice(source: &Address, destination: &Address) -> Self {
+ let mut out = Self([0u8; 30]);
+ out.set_source(source);
+ out.set_destination(destination);
+ out.set_mode(Mode::Stream);
+ out.set_data_type(DataType::Voice);
+ out.set_encryption_type(EncryptionType::None);
+ out
+ }
+
pub fn new_packet(source: &Address, destination: &Address) -> Self {
let mut out = Self([0u8; 30]);
out.set_source(source);
}
self.stream_curr = (self.stream_curr + 1) % 8;
if frame.end_of_stream {
- self.state = State::Idle;
+ self.state = State::TxStreamSentEndOfStream;
}
Some(ModulatorFrame::Stream(frame))
}
self.packet_full = true;
}
} else if port == PORT_STREAM {
- // TODO: handle port 2
+ let mut payload = [0u8; 30];
+ let Ok(len) = kiss_frame.decode_payload(&mut payload) else {
+ continue;
+ };
+ if len < 26 {
+ log::debug!("payload len too short");
+ continue;
+ }
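+                // A full 30-byte payload is a complete LSF; shorter payloads (at least 26 bytes) carry one stream data frame.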
+ if len == 30 {
+ let lsf = LsfFrame(payload);
+ if lsf.check_crc() != 0 {
+ continue;
+ }
+ self.stream_pending_lsf = Some(lsf);
+ } else {
+ if self.stream_full {
+ log::debug!("stream full");
+ continue;
+ }
+ let frame_num_part = u16::from_be_bytes([payload[6], payload[7]]);
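+                        // The top bit of the frame counter flags end-of-stream; the low 15 bits are the frame number.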
+ self.stream_queue[self.stream_next] = StreamFrame {
+ lich_idx: payload[5] >> 5,
+ lich_part: payload[0..5].try_into().unwrap(),
+ frame_number: frame_num_part & 0x7fff,
+ end_of_stream: frame_num_part & 0x8000 > 0,
+ stream_data: payload[8..24].try_into().unwrap(),
+ };
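+                        // Advance the 8-slot ring buffer; if the write index catches the read index, the queue is full.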
+ self.stream_next = (self.stream_next + 1) % 8;
+ if self.stream_next == self.stream_curr {
+ self.stream_full = true;
+ }
+ }
}
}
n
--- /dev/null
+[package]
+name = "m17rt-demod"
+version = "0.1.0"
+edition = "2021"
+license = "MIT"
+authors = ["Thomas Karpiniec <tom.karpiniec@outlook.com>"]
+publish = false
+
+[dependencies]
+m17core = { path = "../../m17core" }
+m17app = { path = "../../m17app" }
+m17codec2 = { path = "../../m17codec2" }
+
+cpal = "0.15.3"
+env_logger = "0.11.6"
+log = "0.4.22"
--- /dev/null
+use m17app::app::M17App;
+use m17app::soundmodem::{InputRrcFile, InputSoundcard, NullOutputSink, Soundmodem};
+use m17codec2::Codec2Adapter;
+use std::path::PathBuf;
+
+pub fn m17app_test() {
+ //let path = PathBuf::from("../../../Data/test_vk7xt.rrc");
+ let path = PathBuf::from("../../../Data/mymod.rrc");
+ let source = InputRrcFile::new(path);
+ //let source = InputSoundcard::new();
+ let soundmodem = Soundmodem::new_with_input_and_output(source, NullOutputSink::new());
+ let app = M17App::new(soundmodem);
+ app.add_stream_adapter(Codec2Adapter::new());
+ app.start();
+ std::thread::sleep(std::time::Duration::from_secs(15));
+}
+
+fn main() {
+ env_logger::init();
+ m17app_test();
+}
--- /dev/null
+[package]
+name = "m17rt-mod"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+m17core = { path = "../../m17core" }
+m17app = { path = "../../m17app" }
+m17codec2 = { path = "../../m17codec2" }
+
+env_logger = "0.11.6"
+log = "0.4.22"
--- /dev/null
+use m17app::app::M17App;
+use m17app::soundmodem::{
+ InputRrcFile, InputSoundcard, NullInputSource, NullOutputSink, OutputRrcFile, Soundmodem,
+};
+use m17codec2::{Codec2Adapter, WavePlayer};
+use std::path::PathBuf;
+
+pub fn mod_test() {
+ let in_path = PathBuf::from("../../../Data/test_vk7xt_8k.wav");
+ let out_path = PathBuf::from("../../../Data/mymod.rrc");
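+    // Transmit-only test: feed silence in place of a receive input and record the modulated baseband to an .rrc file.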
+ let soundmodem =
+ Soundmodem::new_with_input_and_output(NullInputSource::new(), OutputRrcFile::new(out_path));
+ let app = M17App::new(soundmodem);
+ app.start();
+ std::thread::sleep(std::time::Duration::from_secs(1));
+ println!("Beginning playback...");
+ WavePlayer::play(in_path, app.tx());
+ println!("Playback complete, terminating in 5 secs");
+ std::thread::sleep(std::time::Duration::from_secs(5));
+}
+
+fn main() {
+ env_logger::init();
+ mod_test();
+}