]> code.octet-stream.net Git - m17rt/blob - m17codec2/src/lib.rs
Fix timing bugs and add documentation
[m17rt] / m17codec2 / src / lib.rs
1 #![doc = include_str!("../README.md")]
2
3 use codec2::{Codec2, Codec2Mode};
4 use cpal::traits::DeviceTrait;
5 use cpal::traits::HostTrait;
6 use cpal::traits::StreamTrait;
7 use cpal::{Sample, SampleFormat, SampleRate};
8 use log::debug;
9 use m17app::adapter::StreamAdapter;
10 use m17app::app::TxHandle;
11 use m17app::link_setup::LinkSetup;
12 use m17app::link_setup::M17Address;
13 use m17app::StreamFrame;
14 use std::collections::VecDeque;
15 use std::fs::File;
16 use std::io::Write;
17 use std::path::Path;
18 use std::path::PathBuf;
19 use std::sync::{
20 mpsc::{channel, Receiver, Sender},
21 Arc, Mutex,
22 };
23 use std::time::Duration;
24 use std::time::Instant;
25
26 pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
27 let codec2 = Codec2::new(Codec2Mode::MODE_3200);
28 let var_name = codec2;
29 let mut codec = var_name;
30 let mut all_samples: Vec<i16> = vec![];
31 for i in 0..(data.len() / 8) {
32 let mut samples = vec![0; codec.samples_per_frame()];
33 codec.decode(&mut samples, &data[i * 8..((i + 1) * 8)]);
34 all_samples.append(&mut samples);
35 }
36
37 // dude this works
38 let mut speech_out = File::create(out_path).unwrap();
39 for b in &all_samples {
40 speech_out.write_all(&b.to_le_bytes()).unwrap();
41 }
42 all_samples
43 }
44
45 /// Subscribes to M17 streams and attempts to play the decoded Codec2
/// Subscribes to M17 streams and attempts to play the decoded Codec2
pub struct Codec2Adapter {
    // Shared with the playback thread and cpal's output callback.
    state: Arc<Mutex<AdapterState>>,
    // TODO: make this configurable
    // Name of the cpal output device to play through (matched against `Device::name()`).
    output_card: String,
}
51
52 impl Codec2Adapter {
53 pub fn new() -> Self {
54 Self {
55 state: Arc::new(Mutex::new(AdapterState {
56 tx: None,
57 out_buf: VecDeque::new(),
58 codec2: Codec2::new(Codec2Mode::MODE_3200),
59 end_tx: None,
60 })),
61 // TODO: this doesn't work on rpi. Use default_output_device() by default
62 output_card: "default".to_owned(),
63 }
64 }
65 }
66
67 impl Default for Codec2Adapter {
68 fn default() -> Self {
69 Self::new()
70 }
71 }
72
/// State shared between the adapter callbacks and the playback thread.
struct AdapterState {
    // Handle for transmitting back into the M17 app; set on registration.
    tx: Option<TxHandle>,
    /// FIFO of decoded output samples awaiting playback by the cpal callback
    out_buf: VecDeque<i16>,
    // Decoder instance; re-created at the start of each stream.
    codec2: Codec2,
    // Dropping this sender unblocks and ends the playback thread.
    end_tx: Option<Sender<()>>,
}
80
81 impl StreamAdapter for Codec2Adapter {
82 fn adapter_registered(&self, _id: usize, handle: TxHandle) {
83 self.state.lock().unwrap().tx = Some(handle);
84
85 let (end_tx, end_rx) = channel();
86 let state = self.state.clone();
87 let output_card = self.output_card.clone();
88 std::thread::spawn(move || stream_thread(end_rx, state, output_card));
89 self.state.lock().unwrap().end_tx = Some(end_tx);
90 }
91
92 fn adapter_removed(&self) {
93 let mut state = self.state.lock().unwrap();
94 state.tx = None;
95 state.end_tx = None;
96 }
97
98 fn tnc_started(&self) {}
99
100 fn tnc_closed(&self) {}
101
102 fn stream_began(&self, _link_setup: LinkSetup) {
103 // for now we will assume:
104 // - unencrypted
105 // - data type is Voice (Codec2 3200), not Voice+Data
106 // TODO: is encryption handled here or in M17App, such that we get a decrypted stream?
107 // TODO: handle the Voice+Data combination with Codec2 1600
108 self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
109 }
110
111 fn stream_data(&self, _frame_number: u16, _is_final: bool, data: Arc<[u8; 16]>) {
112 let mut state = self.state.lock().unwrap();
113 for encoded in data.chunks(8) {
114 if state.out_buf.len() < 1024 {
115 let mut samples = [i16::EQUILIBRIUM; 160]; // while assuming 3200
116 state.codec2.decode(&mut samples, encoded);
117 // TODO: maybe get rid of VecDeque so we can decode directly into ring buffer?
118 for s in samples {
119 state.out_buf.push_back(s);
120 }
121 } else {
122 debug!("out_buf overflow");
123 }
124 }
125 }
126 }
127
128 fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
129 let mut state = state.lock().unwrap();
130 for d in data {
131 *d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
132 }
133 }
134
135 /// Create and manage the stream from a dedicated thread since it's `!Send`
/// Create and manage the stream from a dedicated thread since it's `!Send`
///
/// Opens the named output device, builds a mono 16-bit 8 kHz output stream
/// fed by [`output_cb`], plays it, and blocks until `end` disconnects
/// (i.e. the adapter's `end_tx` is dropped on removal).
///
/// Panics if the device is missing or no matching output config exists.
fn stream_thread(end: Receiver<()>, state: Arc<Mutex<AdapterState>>, output_card: String) {
    let host = cpal::default_host();
    // Select the output device whose name matches `output_card` exactly.
    let device = host
        .output_devices()
        .unwrap()
        .find(|d| d.name().unwrap() == output_card)
        .unwrap();
    let mut configs = device.supported_output_configs().unwrap();
    // TODO: channels == 1 doesn't work on a Raspberry Pi
    // make this configurable and support interleaving LRLR stereo samples if using 2 channels
    let config = configs
        .find(|c| c.channels() == 1 && c.sample_format() == SampleFormat::I16)
        .unwrap()
        // 8 kHz to match the Codec2 decoder's output sample rate
        .with_sample_rate(SampleRate(8000));
    let stream = device
        .build_output_stream(
            &config.into(),
            move |data: &mut [i16], _info: &cpal::OutputCallbackInfo| {
                // Drain decoded samples (or silence) into the device buffer.
                output_cb(data, &state);
            },
            |e| {
                // trigger end_tx here? always more edge cases
                debug!("error occurred in codec2 playback: {e:?}");
            },
            None,
        )
        .unwrap();
    stream.play().unwrap();
    // Block until the adapter is removed; recv() returns Err once end_tx is dropped.
    let _ = end.recv();
    // it seems concrete impls of Stream have a Drop implementation that will handle termination
}
167
/// Transmits a wave file as an M17 stream
///
/// Stateless; see [`WavePlayer::play`] for the blocking transmit routine.
pub struct WavePlayer;
170
impl WavePlayer {
    /// Plays a wave file (blocking).
    ///
    /// * `path`: wave file to transmit, must be 8 kHz mono and 16-bit LE
    /// * `tx`: a `TxHandle` obtained from an `M17App`
    /// * `source`: address of transmission source
    /// * `destination`: address of transmission destination
    /// * `channel_access_number`: from 0 to 15, usually 0
    ///
    /// # Panics
    /// Panics if the wave file cannot be opened.
    pub fn play(
        path: PathBuf,
        tx: TxHandle,
        source: &M17Address,
        destination: &M17Address,
        channel_access_number: u8,
    ) {
        let mut reader = hound::WavReader::open(path).unwrap();
        let mut samples = reader.samples::<i16>();

        let mut codec = Codec2::new(Codec2Mode::MODE_3200);
        // One M17 voice frame carries two codec frames: 2 x 160 input samples -> 2 x 8 bytes.
        let mut in_buf = [0i16; 160];
        let mut out_buf = [0u8; 16];
        let mut lsf_chunk: usize = 0;
        // 320 samples at 8 kHz is 40 ms of audio, so emit one frame per 40 ms tick.
        const TICK: Duration = Duration::from_millis(40);
        let mut next_tick = Instant::now() + TICK;
        let mut frame_number = 0;

        let mut setup = LinkSetup::new_voice(source, destination);
        setup.set_channel_access_number(channel_access_number);
        tx.transmit_stream_start(&setup);

        loop {
            let mut last_one = false;
            for out in out_buf.chunks_mut(8) {
                for i in in_buf.iter_mut() {
                    let sample = match samples.next() {
                        Some(Ok(sample)) => sample,
                        _ => {
                            // Samples exhausted (or a read error): pad the remainder
                            // of this frame with silence and mark end of stream.
                            last_one = true;
                            0
                        }
                    };
                    *i = sample;
                }
                codec.encode(out, &in_buf);
            }
            tx.transmit_stream_next(&StreamFrame {
                // The link setup data is cycled through the frames' LICH fields in 6 parts.
                lich_idx: lsf_chunk as u8,
                lich_part: setup.lich_part(lsf_chunk as u8),
                frame_number,
                end_of_stream: last_one,
                stream_data: out_buf,
            });
            frame_number += 1;
            lsf_chunk = (lsf_chunk + 1) % 6;

            if last_one {
                break;
            }

            // Sleep toward an absolute deadline so per-iteration overshoot doesn't accumulate.
            std::thread::sleep(next_tick.duration_since(Instant::now()));
            next_tick += TICK;
        }
    }
}
234 }