#![doc = include_str!("../README.md")]

use codec2::{Codec2, Codec2Mode};
use cpal::traits::DeviceTrait;
use cpal::traits::HostTrait;
use cpal::traits::StreamTrait;
use cpal::{Sample, SampleFormat, SampleRate};
use log::debug;
use m17app::adapter::StreamAdapter;
use m17app::app::TxHandle;
use m17app::error::AdapterError;
use m17app::link_setup::LinkSetup;
use m17app::link_setup::M17Address;
use m17app::StreamFrame;
use std::collections::VecDeque;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::sync::{
    mpsc::{channel, Receiver, Sender},
    Arc, Mutex,
};
use std::time::Duration;
use std::time::Instant;
use thiserror::Error;

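/// Decodes a buffer of 8-byte Codec2 3200 frames, writes the decoded audio to
/// `out_path` as raw 16-bit little-endian PCM, and returns the samples.
///
/// A minimal sketch of a call site; the input bytes and output path below are
/// placeholders only.
///
/// ```no_run
/// use m17codec2::decode_codec2;
///
/// // Two 8-byte Codec2 3200 frames (placeholder contents)
/// let encoded = vec![0u8; 16];
/// let samples = decode_codec2(&encoded, "speech_out.raw");
/// assert_eq!(samples.len(), 320); // MODE_3200 yields 160 samples per 8-byte frame
/// ```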
pub fn decode_codec2<P: AsRef<Path>>(data: &[u8], out_path: P) -> Vec<i16> {
    let mut codec = Codec2::new(Codec2Mode::MODE_3200);
    let mut all_samples: Vec<i16> = vec![];
    for i in 0..(data.len() / 8) {
        let mut samples = vec![0; codec.samples_per_frame()];
        codec.decode(&mut samples, &data[i * 8..((i + 1) * 8)]);
        all_samples.append(&mut samples);
    }

    // Write the decoded audio to disk as raw 16-bit little-endian PCM
    let mut speech_out = File::create(out_path).unwrap();
    for b in &all_samples {
        speech_out.write_all(&b.to_le_bytes()).unwrap();
    }
    all_samples
}

/// Subscribes to M17 streams and attempts to play the decoded Codec2 audio.
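///
/// The sketch below shows one way the adapter might be wired up. The card name
/// and the commented-out `M17App` registration call are placeholders/assumptions,
/// so treat this as illustrative rather than a working recipe.
///
/// ```ignore
/// use m17codec2::Codec2Adapter;
///
/// let mut adapter = Codec2Adapter::new();
/// // Optional: select a specific output card; otherwise the host's default
/// // output device is used. The name below is a placeholder.
/// adapter.set_output_card("My USB Audio Device");
/// // Attach it to an M17App so it receives incoming voice streams.
/// // app.add_stream_adapter(adapter); // hypothetical M17App method
/// ```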
pub struct Codec2Adapter {
    state: Arc<Mutex<AdapterState>>,
    output_card: Option<String>,
}

impl Codec2Adapter {
    pub fn new() -> Self {
        Self {
            state: Arc::new(Mutex::new(AdapterState {
                tx: None,
                out_buf: VecDeque::new(),
                codec2: Codec2::new(Codec2Mode::MODE_3200),
                end_tx: None,
            })),
            // Default to None so stream_thread() falls back to default_output_device();
            // naming a card here doesn't work on a Raspberry Pi.
            output_card: None,
        }
    }

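    /// Selects the sound card to use for playback.
    ///
    /// The name must match one of the host's cpal output device names; if no such
    /// device is found, `start()` fails with `M17Codec2Error::CardUnavailable`.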
    pub fn set_output_card<S: Into<String>>(&mut self, card_name: S) {
        self.output_card = Some(card_name.into());
    }
}

impl Default for Codec2Adapter {
    fn default() -> Self {
        Self::new()
    }
}

struct AdapterState {
    tx: Option<TxHandle>,
    /// Circular buffer of output samples for playback
    out_buf: VecDeque<i16>,
    codec2: Codec2,
    end_tx: Option<Sender<()>>,
}

impl StreamAdapter for Codec2Adapter {
    fn start(&self, handle: TxHandle) -> Result<(), AdapterError> {
        self.state.lock().unwrap().tx = Some(handle);

        let (end_tx, end_rx) = channel();
        let (setup_tx, setup_rx) = channel();
        let state = self.state.clone();
        let output_card = self.output_card.clone();
        std::thread::spawn(move || stream_thread(end_rx, setup_tx, state, output_card));
        self.state.lock().unwrap().end_tx = Some(end_tx);
        // Propagate any errors arising in the thread
        setup_rx.recv()?
    }

    fn close(&self) -> Result<(), AdapterError> {
        let mut state = self.state.lock().unwrap();
        state.tx = None;
        state.end_tx = None;
        Ok(())
    }

    fn stream_began(&self, _link_setup: LinkSetup) {
        // for now we will assume:
        // - unencrypted
        // - data type is Voice (Codec2 3200), not Voice+Data
        // TODO: is encryption handled here or in M17App, such that we get a decrypted stream?
        // TODO: handle the Voice+Data combination with Codec2 1600
        self.state.lock().unwrap().codec2 = Codec2::new(Codec2Mode::MODE_3200);
    }

    fn stream_data(&self, _frame_number: u16, _is_final: bool, data: Arc<[u8; 16]>) {
        let mut state = self.state.lock().unwrap();
        for encoded in data.chunks(8) {
            if state.out_buf.len() < 1024 {
                let mut samples = [i16::EQUILIBRIUM; 160]; // assumes MODE_3200: 160 samples per 8-byte frame
                state.codec2.decode(&mut samples, encoded);
                // TODO: maybe get rid of VecDeque so we can decode directly into ring buffer?
                for s in samples {
                    state.out_buf.push_back(s);
                }
            } else {
                debug!("out_buf overflow");
            }
        }
    }
}

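/// cpal output callback: fills the device buffer by draining decoded samples from
/// the shared queue, substituting silence (`i16::EQUILIBRIUM`) whenever it runs dry.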
fn output_cb(data: &mut [i16], state: &Mutex<AdapterState>) {
    let mut state = state.lock().unwrap();
    for d in data {
        *d = state.out_buf.pop_front().unwrap_or(i16::EQUILIBRIUM);
    }
}

/// Create and manage the stream from a dedicated thread since it's `!Send`
fn stream_thread(
    end: Receiver<()>,
    setup_tx: Sender<Result<(), AdapterError>>,
    state: Arc<Mutex<AdapterState>>,
    output_card: Option<String>,
) {
    let host = cpal::default_host();
    let device = if let Some(output_card) = output_card {
        // TODO: more error handling for unwraps
        match host
            .output_devices()
            .unwrap()
            .find(|d| d.name().map(|n| n == output_card).unwrap_or(false))
        {
            Some(d) => d,
            None => {
                let _ = setup_tx.send(Err(M17Codec2Error::CardUnavailable(output_card).into()));
                return;
            }
        }
    } else {
        match host.default_output_device() {
            Some(d) => d,
            None => {
                let _ = setup_tx.send(Err(M17Codec2Error::DefaultCardUnavailable.into()));
                return;
            }
        }
    };
    let card_name = device.name().unwrap();
    let mut configs = match device.supported_output_configs() {
        Ok(c) => c,
        Err(e) => {
            let _ = setup_tx.send(Err(
                M17Codec2Error::OutputConfigsUnavailable(card_name, e).into()
            ));
            return;
        }
    };
    // TODO: channels == 1 doesn't work on a Raspberry Pi
    // make this configurable and support interleaving LRLR stereo samples if using 2 channels
    let config = match configs.find(|c| c.channels() == 1 && c.sample_format() == SampleFormat::I16)
    {
        Some(c) => c,
        None => {
            let _ = setup_tx.send(Err(
                M17Codec2Error::SupportedOutputUnavailable(card_name).into()
            ));
            return;
        }
    };

    let config = config.with_sample_rate(SampleRate(8000));
    let stream = match device.build_output_stream(
        &config.into(),
        move |data: &mut [i16], _info: &cpal::OutputCallbackInfo| {
            output_cb(data, &state);
        },
        |e| {
            // trigger end_tx here? always more edge cases
            debug!("error occurred in codec2 playback: {e:?}");
        },
        None,
    ) {
        Ok(s) => s,
        Err(e) => {
            let _ = setup_tx.send(Err(
                M17Codec2Error::OutputStreamBuildError(card_name, e).into()
            ));
            return;
        }
    };
    match stream.play() {
        Ok(()) => (),
        Err(e) => {
            let _ = setup_tx.send(Err(
                M17Codec2Error::OutputStreamPlayError(card_name, e).into()
            ));
            return;
        }
    }
    let _ = setup_tx.send(Ok(()));
    let _ = end.recv();
    // it seems concrete impls of Stream have a Drop implementation that will handle termination
}

/// Transmits a wave file as an M17 stream
pub struct WavePlayer;

impl WavePlayer {
    /// Plays a wave file (blocking).
    ///
    /// * `path`: wave file to transmit, must be 8 kHz mono and 16-bit LE
    /// * `tx`: a `TxHandle` obtained from an `M17App`
    /// * `source`: address of transmission source
    /// * `destination`: address of transmission destination
    /// * `channel_access_number`: from 0 to 15, usually 0
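    ///
    /// A minimal sketch of a call site; the callsigns, file path, and the
    /// `M17Address` constructors are placeholders/assumptions, and `tx` is
    /// assumed to come from a running `M17App`.
    ///
    /// ```ignore
    /// let source = M17Address::from_callsign("N0CALL")?; // hypothetical constructor
    /// let destination = M17Address::new_broadcast();     // hypothetical constructor
    /// WavePlayer::play(
    ///     PathBuf::from("announcement.wav"), // 8 kHz mono, 16-bit LE
    ///     tx,                                // TxHandle from a running M17App
    ///     &source,
    ///     &destination,
    ///     0,                                 // channel access number
    /// );
    /// ```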
    pub fn play(
        path: PathBuf,
        tx: TxHandle,
        source: &M17Address,
        destination: &M17Address,
        channel_access_number: u8,
    ) {
        let mut reader = hound::WavReader::open(path).unwrap();
        let mut samples = reader.samples::<i16>();

        let mut codec = Codec2::new(Codec2Mode::MODE_3200);
        let mut in_buf = [0i16; 160];
        let mut out_buf = [0u8; 16];
        let mut lsf_chunk: usize = 0;
        const TICK: Duration = Duration::from_millis(40);
        let mut next_tick = Instant::now() + TICK;
        let mut frame_number = 0;

        let mut setup = LinkSetup::new_voice(source, destination);
        setup.set_channel_access_number(channel_access_number);
        tx.transmit_stream_start(&setup);

        loop {
            let mut last_one = false;
            // Each 40 ms M17 stream frame carries two 20 ms Codec2 frames (2 x 8 bytes)
            for out in out_buf.chunks_mut(8) {
                for i in in_buf.iter_mut() {
                    let sample = match samples.next() {
                        Some(Ok(sample)) => sample,
                        _ => {
                            last_one = true;
                            0
                        }
                    };
                    *i = sample;
                }
                codec.encode(out, &in_buf);
            }
            tx.transmit_stream_next(&StreamFrame {
                lich_idx: lsf_chunk as u8,
                lich_part: setup.lich_part(lsf_chunk as u8),
                frame_number,
                end_of_stream: last_one,
                stream_data: out_buf,
            });
            frame_number += 1;
            lsf_chunk = (lsf_chunk + 1) % 6; // LSF is spread across 6 LICH chunks

            if last_one {
                break;
            }

            // Pace transmission to one stream frame per 40 ms
            std::thread::sleep(next_tick.duration_since(Instant::now()));
            next_tick += TICK;
        }
    }
}

#[derive(Debug, Error)]
pub enum M17Codec2Error {
    #[error("selected card '{0}' does not exist or is in use")]
    CardUnavailable(String),

    #[error("default output card is unavailable")]
    DefaultCardUnavailable,

    #[error("selected card '{0}' failed to list available output configs: '{1}'")]
    OutputConfigsUnavailable(String, #[source] cpal::SupportedStreamConfigsError),

    #[error("selected card '{0}' did not offer a compatible output config type, either due to hardware limitations or because it is currently in use")]
    SupportedOutputUnavailable(String),

    #[error("selected card '{0}' was unable to build an output stream: '{1}'")]
    OutputStreamBuildError(String, #[source] cpal::BuildStreamError),

    #[error("selected card '{0}' was unable to play an output stream: '{1}'")]
    OutputStreamPlayError(String, #[source] cpal::PlayStreamError),
}