-
-
Notifications
You must be signed in to change notification settings - Fork 312
Open
Labels
Description
Description
I have 2 different components, one for Video one for Audio,
My Video component successfully plays the recorded Video, but My Audio does not play the audio only
I am using a Mac M1 pro
Vue 3
"videojs-record": "^4.7.0"
here is my complete component code
<script setup>
/* eslint-disable */
import "video.js/dist/video-js.css";
import "videojs-record/dist/css/videojs.record.css";
import videojs from "video.js";
import "webrtc-adapter";
import RecordRTC from "recordrtc";
import WaveSurfer from "wavesurfer.js";
import MicrophonePlugin from "wavesurfer.js/dist/plugin/wavesurfer.microphone.js";
WaveSurfer.microphone = MicrophonePlugin;
import Wavesurfer from "videojs-wavesurfer/dist/videojs.wavesurfer.js";
import videojs_wavesurfer_css from "videojs-wavesurfer/dist/css/videojs.wavesurfer.css?inline";
import Record from "videojs-record/dist/videojs.record.js";
const emit = defineEmits(['audio_recorded_data'])
const props = defineProps({
"audio-recorded-data": {
type: Object,
required: false,
default: {}
},
"is-reviewing": {
type: Boolean,
required: false,
default: false
},
})
const options = ref({
controls: true,
bigPlayButton: false,
width: 600,
height: 300,
fluid: false,
plugins: {
wavesurfer: {
backend: "WebAudio",
waveColor: "#36393b",
progressColor: "black",
displayMilliseconds: true,
debug: true,
cursorWidth: 1,
hideScrollbar: true,
plugins: [
// enable microphone plugin
WaveSurfer.microphone.create({
bufferSize: 4096,
numberOfInputChannels: 1,
numberOfOutputChannels: 1,
constraints: {
video: false,
audio: true,
},
}),
],
},
record: {
audio: true,
video: false,
maxLength: 300,
displayMilliseconds: false,
debug: true,
},
},
});
const player = ref(null);
const playerWidth = ref("80%"); // Initial width
const playerHeight = ref("auto"); // Initial height
const recordedData = ref(false)
const recordingTime = ref(0);
onMounted(() => {
// create player
window.addEventListener("resize", setPlayerSize());
setTimeout(() => {
player.value = videojs("myAudio", options.value, function () {
// print version information at startup
var msg =
"Using video.js " +
videojs.VERSION +
" with videojs-record " +
videojs.getPluginVersion("record") +
", videojs-wavesurfer " +
videojs.getPluginVersion("wavesurfer") +
", wavesurfer.js " +
WaveSurfer.VERSION +
" and recordrtc " +
RecordRTC.version;
videojs.log(msg);
});
// error handling
player.value.on("deviceError", function () {
console.log("device error:", player.value.deviceErrorCode);
});
player.value.on("error", function (element, error) {
console.error(error);
});
// user clicked the record button and started recording
player.value.on("startRecord", function () {
recordedData.value = false
console.log("started recording!");
});
// user completed recording and stream is available
player.value.on("finishRecord", function () {
recordedData.value = false
// the blob object contains the recorded data that
// can be downloaded by the user, stored on server etc.
console.log("finished recording: ", player.value.recordedData);
recordedData.value = player.value.recordedData
emit('audio_recorded_data', recordedData.value)
});
player.value.on('progressRecord', function () {
recordingTime.value = player.value.record().getDuration();
});
setTimeout(() => {
if (!props?.isReviewing && props?.audioRecordedData === undefined || Object.keys(props?.audioRecordedData).length === 0) {
setTimeout(() => {
player.value.record().getDevice();
}, 500);
} else {
setTimeout(() => {
try {
const blob = new Blob([props?.audioRecordedData], { type: props?.audioRecordedData?.type });
const blobUrl = URL.createObjectURL(blob);
// player.value.src({ type: props?.audioRecordedData?.type, src: blobUrl });
// player.value.play();
// Option 2: Create a new player instance (experiment with both)
const newPlayer = videojs("myAudio", options.value, function () {
this.src({ type: props?.audioRecordedData?.type, src: blobUrl });
this.play(); // Play the audio
});
} catch (error) {
console.error('Error setting audio source:', error);
}
}, 500);
}
}, 200);
}, 100);
});
const formattedRecordingTime = computed(() => {
const minutes = Math.floor(recordingTime.value / 60);
const seconds = Math.floor(recordingTime.value % 60);
return `0${minutes}:${seconds < 10 ? '0' : ''}${seconds}`;
})
const setPlayerSize = () => {
// Adjust player size based on window size or other conditions
// Example: Make the player width 80% of the window width and height 60% of the window height
playerWidth.value = `${window.innerWidth * 100}px`;
playerHeight.value = `${window.innerHeight * 0.55}px`;
};
onBeforeUnmount(() => {
window.removeEventListener("resize", setPlayerSize());
if (player.value) {
player.value.record().destroy();
}
});
const recordingStarted = ref(false)
const startRecording = () => {
player.value.record().start()
emit('audio_recorded_data', false)
recordingStarted.value = true
}
const stopRecording = () => {
player.value.record().stop()
recordingStarted.value = false
}
/* const resetRecording = () => {
player.value.record().reset()
recordingStarted.value = false
recordedData.value = false
recordedData.time = 0
} */
</script>
<template>
<audio id="myAudio" class="video-js vjs-default-skin"></audio>
<div class="px-20">
<div class="flex justify-center font-extrabold md:font-bold sm:font-semibold sm:text-xl md:text-2xl text-3xl">
<!-- Use a combination of flexible and fixed width for the time ticker container -->
<div class="w-full md:w-auto md:max-w-xs"> <!-- Adjust the max width as needed -->
<div class="w-60 text-center">
<h1 class="blue text-center">{{ formattedRecordingTime }}</h1>
</div>
</div>
</div>
<div class="flex items-center justify-center text-center" v-if="!props?.isReviewing">
<div class="bg-blue-light px-8 py-3 rounded-full" v-if="!recordingStarted">
<div class="flex items-center gap-2">
<button class="flex items-center gap-2" @click="startRecording">
<span>
<svg width="20px" height="20px" viewBox="0 0 24 24" fill="none"
xmlns="http://www.w3.org/2000/svg" stroke="#C9C9C9" stroke-width="0.336">
<g id="SVGRepo_bgCarrier" stroke-width="0" />
<g id="SVGRepo_tracerCarrier" stroke-linecap="round" stroke-linejoin="round" />
<g id="SVGRepo_iconCarrier">
<path
d="M21.4086 9.35258C23.5305 10.5065 23.5305 13.4935 21.4086 14.6474L8.59662 21.6145C6.53435 22.736 4 21.2763 4 18.9671L4 5.0329C4 2.72368 6.53435 1.26402 8.59661 2.38548L21.4086 9.35258Z"
fill="#1E346F" />
</g>
</svg>
</span>
<span class="text-blue"> Start </span>
</button>
</div>
</div>
<div class="bg-blue-light px-3 py-3 rounded-full" v-if="recordingStarted">
<div class="flex items-center gap-2">
<button class="flex items-center gap-2" @click="stopRecording">
<span>
<svg width="24px" height="24px" viewBox="0 0 24.00 24.00" fill="none"
xmlns="http://www.w3.org/2000/svg" stroke="#C9C9C9" stroke-width="0.336">
<g id="SVGRepo_bgCarrier" stroke-width="0" />
<g id="SVGRepo_tracerCarrier" stroke-linecap="round" stroke-linejoin="round" />
<g id="SVGRepo_iconCarrier">
<path
d="M2 6C2 4.11438 2 3.17157 2.58579 2.58579C3.17157 2 4.11438 2 6 2C7.88562 2 8.82843 2 9.41421 2.58579C10 3.17157 10 4.11438 10 6V18C10 19.8856 10 20.8284 9.41421 21.4142C8.82843 22 7.88562 22 6 22C4.11438 22 3.17157 22 2.58579 21.4142C2 20.8284 2 19.8856 2 18V6Z"
fill="#1E346F" />
<path
d="M14 6C14 4.11438 14 3.17157 14.5858 2.58579C15.1716 2 16.1144 2 18 2C19.8856 2 20.8284 2 21.4142 2.58579C22 3.17157 22 4.11438 22 6V18C22 19.8856 22 20.8284 21.4142 21.4142C20.8284 22 19.8856 22 18 22C16.1144 22 15.1716 22 14.5858 21.4142C14 20.8284 14 19.8856 14 18V6Z"
fill="#1E346F" />
</g>
</svg>
</span>
</button>
</div>
</div>
</div>
</div>
</template>
<style scoped>
/* change player background color */
#myAudio {
background-color: #9fd6ba;
border-radius: 10px !important;
}
.audio-player-container {
/* Add additional styling for the container if needed */
display: flex;
justify-content: center;
align-items: center;
}
:deep(.vjs-control-bar) {
border-bottom-right-radius: 10px !important;
border-bottom-left-radius: 10px !important;
}</style>