Commit 82fffc62 authored by 张伯涛

Optimize audio waveform rendering

parent 272ebafb
@@ -56,13 +56,13 @@ function stopPlayAudioRegion() {
wavesurfer.value.pause();
}
/** Zoom in */
function zoom() {
function zoom(zoomLevel) {
let region = Object.values(wavesurfer.value.regions.list)[0];
if (region) {
const start = region.start; // region start time
const end = region.end; // region end time
const zoomLevel = 400; // set the zoom level (adjust as needed)
// const zoomLevel = 400; // set the zoom level (adjust as needed)
// zoom after loading
wavesurfer.value.seekTo(start / wavesurfer.value.getDuration()); // seek to the start time
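This hunk cuts off before the actual zoom call. A minimal end-to-end sketch of the parameterized function, assuming the elided lines still call wavesurfer.value.zoom(zoomLevel) after seeking (the deleted handleBack further down did the same with a fixed level):

/** Sketch: zoom the waveform to a caller-supplied level (pixels per second in wavesurfer.js). */
function zoom(zoomLevel) {
  const region = Object.values(wavesurfer.value.regions.list)[0];
  if (region) {
    // Seek to the region start, then apply the requested zoom level.
    wavesurfer.value.seekTo(region.start / wavesurfer.value.getDuration());
    wavesurfer.value.zoom(zoomLevel);
  }
}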
@@ -70,37 +70,16 @@ function zoom() {
// wavesurfer.value.play(); // play the new audio segment
}
}
/** Reset */
function handleBack () {
let region = Object.values(wavesurfer.value.regions.list)[0];
if (region) {
const start = region.start; // get the region start time
const end = region.end; // get the region end time
// seek to the start of the selected region
wavesurfer.value.seekTo(start / wavesurfer.value.getDuration());
// set the default zoom level (1, or another value as needed)
const defaultZoomLevel = 1; // adjust as needed
wavesurfer.value.zoom(defaultZoomLevel);
}
}
/** Get audio */
function playAudio() {
// if a waveform already exists, destroy it and render a new one
if (wavesurfer.value) {
wavesurfer.value.destroy();
}
getAudio().then(async res => {
const audioSrc = URL.createObjectURL(new Blob([res]));
// assign to the player and the waveform
audioPlayer.value.src = audioSrc;
// fetch the audio stream from the API
const audioStream = await fetchAudioStream();
const audioBlob = new Blob([audioStream], { type: 'audio/mp3' });
const audioObjectUrl = URL.createObjectURL(audioBlob);
console.log('audioStream',audioStream)
console.log('audioBlob',audioBlob)
console.log('audioObjectUrl',audioObjectUrl)
// create the WaveSurfer instance
wavesurfer.value = WaveSurfer.create({
container: waveform.value,
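Deleting handleBack above is safe because its body was the zoom body with a fixed level of 1; both toolbar actions now route through the single parameterized function:

zoom(400); // "Zoom in": the level that used to be hard-coded inside zoom()
zoom(1);   // "Reset": the level the deleted handleBack used as defaultZoomLevel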
@@ -115,13 +94,14 @@ function playAudio() {
responsive: true,
normalize: true,
plugins: [
// region selection plugin
RegionsPlugin.create(
{
regionsMinLength: 2,
regions: [
{
start: 5,
end: 7,
end: 15,
loop: false,
color: 'hsla(200, 50%, 70%, 0.4)',
minLength: 1,
@@ -134,6 +114,7 @@ function playAudio() {
}
}
),
// timeline plugin
TimelinePlugin.create({
container: timeline.value,
fontSize: 14,
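The region configured above (start 5, end 15) is what playAudioRegion and its pause counterpart at the top of this diff operate on. A sketch of the play side, assuming the wavesurfer.js v6 regions API implied by regions.list, where a region object exposes play():

/** Sketch: play only the first configured region (the real handler is outside this diff). */
function playAudioRegion() {
  const region = Object.values(wavesurfer.value.regions.list)[0];
  if (region) {
    region.play(); // plays from region.start and stops at region.end
  }
}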
@@ -149,16 +130,9 @@ function playAudio() {
]
});
// load the audio with WaveSurfer
wavesurfer.value.load(audioObjectUrl);
wavesurfer.value.load(audioSrc);
})
}
async function fetchAudioStream() {
const response = await fetch('http://192.168.0.14:8099/busFireExtinguisher/test');
if (!response.ok) {
throw new Error('Network response was not ok');
}
return response.arrayBuffer(); // or response.blob(), depending on the stream type returned
}
// select the first row on initialization
watch(
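With fetchAudioStream removed, the blob URL built from getAudio() is the single audio source for both the <audio> element and the waveform. Condensed, with the WaveSurfer options elided, the load path now reads roughly:

function playAudio() {
  // Destroy any existing waveform before rendering a new one.
  if (wavesurfer.value) {
    wavesurfer.value.destroy();
  }
  getAudio().then(res => {
    const audioSrc = URL.createObjectURL(new Blob([res]));
    audioPlayer.value.src = audioSrc; // drive the <audio> element
    wavesurfer.value = WaveSurfer.create({ /* options as in the diff */ });
    wavesurfer.value.load(audioSrc); // the same URL feeds the waveform
  });
}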
@@ -365,7 +339,7 @@ defineExpose({
<!-- <span>{{ selectedRow.audioUrl }}</span>-->
</div>
<div ref="waveform" @mousedown="startSelection" @mouseup="endSelection" style="width: 800px; height: 150px;"></div>
<div ref="waveform" style="width: 800px; height: 150px;"></div>
<!-- timeline container -->
<div ref="timeline" style="width: 100%; height: 30px;"></div>
<div class="details-item">
@@ -374,8 +348,8 @@ defineExpose({
<button @click="playAudio">获取音频</button>
<button @click="playAudioRegion">播放区域</button>
<button @click="stopPlayAudioRegion">暂停播放区域</button>
<button @click="zoom">放大</button>
<button @click="handleBack">还原</button>
<button @click="zoom(400)">放大</button>
<button @click="zoom(1)">还原</button>
<!-- <span>{{ selectedRow.audioUrl }}</span>-->
</div>
</div>
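A possible follow-up, not part of this commit: naming the two levels so the template reads as intent rather than magic numbers (ZOOM_IN_LEVEL and ZOOM_RESET_LEVEL are hypothetical names):

const ZOOM_IN_LEVEL = 400; // hypothetical constant for the "Zoom in" button
const ZOOM_RESET_LEVEL = 1; // hypothetical constant for the "Reset" button
// <button @click="zoom(ZOOM_IN_LEVEL)">Zoom in</button>
// <button @click="zoom(ZOOM_RESET_LEVEL)">Reset</button>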