import SRTC from '@seastart/srtc-web-sdk';

const srtc = new SRTC();

// Example: synthesize an audio MediaStreamTrack with the Web Audio API.
// An oscillator is wired into a MediaStreamDestination node; the node's
// stream then exposes the audio track we want to publish.
const audioCtx = new AudioContext();
const osc = audioCtx.createOscillator();
const dest = audioCtx.createMediaStreamDestination();
osc.connect(dest);
osc.start();

const [msAudioTrack] = dest.stream.getAudioTracks();

// Wrap the raw track as an SDK track and publish it.
const customAudioTrack = srtc.createLocalCustomAudioTrack(msAudioTrack);
await srtc.publishLocalTrack(customAudioTrack, { desc: '合成音频' });
```
---
### 自定义视频流
通过 `srtc.createLocalCustomVideoTrack(msTrack)` 包装,适合 Canvas 绘制场景:
```typescript
import SRTC from '@seastart/srtc-web-sdk';

const srtc = new SRTC();

// Example: push the contents of a <canvas> element as a video stream.
const canvas = document.querySelector<HTMLCanvasElement>('#my-canvas')!;
const ctx = canvas.getContext('2d')!;

// Render loop — an animated, color-cycling rectangle serves as demo content.
const draw = () => {
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  ctx.fillStyle = `hsl(${Date.now() / 10 % 360}, 70%, 50%)`;
  ctx.fillRect(50, 50, 200, 200);
  requestAnimationFrame(draw);
};
draw();

// Capture the canvas as a MediaStream (the argument is the frame rate).
const canvasStream = canvas.captureStream(15);
const [msVideoTrack] = canvasStream.getVideoTracks();

// Wrap the raw track as an SDK track.
const customVideoTrack = srtc.createLocalCustomVideoTrack(msVideoTrack);

// Local preview.
customVideoTrack.addPlayView(document.querySelector<HTMLElement>('#preview')!);

// Publish.
await srtc.publishLocalTrack(customVideoTrack, { desc: 'canvas 视频' });