To build a voice-enabled AI application without a frontend framework, you can use the Layercode JavaScript SDK (@layercode/js-sdk) directly. The vanilla JavaScript example below wraps LayercodeClient in a small ChatSession class that wires the SDK's callbacks to plain DOM elements.
import { LayercodeClient } from "@layercode/js-sdk";
/**
 * @class ChatSession
 * @classdesc DOM-specific wrapper for LayercodeClient that handles UI integration
 */
class ChatSession {
  /**
   * Creates an instance of ChatSession.
   * @param {string} micContainerId - The ID of the push-to-talk microphone button.
   * @param {string} codeWindowId - The ID of the code window element.
   * @param {string} waveCanvasId - The ID of the waveform canvas element.
   * @param {string} meterCanvasId - The ID of the audio meter canvas element.
   */
  constructor(micContainerId, codeWindowId, waveCanvasId, meterCanvasId) {
    this.codeWindow = document.getElementById(codeWindowId);
    this.micContainer = document.getElementById(micContainerId);
    // SpeechWidget and MeterWidget are canvas visualizers assumed to be defined elsewhere on the page
    this.speechWidget = new SpeechWidget(waveCanvasId);
    this.meterWidget = new MeterWidget(meterCanvasId);

    // Create the core client
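    // NOTE: connection options (such as your agent/pipeline ID and the session
    // authorization endpoint) are omitted here for brevity; see the Layercode
    // docs for the full LayercodeClient configuration for your SDK version.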
    this.client = new LayercodeClient({
      onUserAmplitudeChange: (amplitude) => {
        this.meterWidget.speakAmplitude = amplitude;
      },
      onAgentAmplitudeChange: (amplitude) => {
        this.speechWidget.speakAmplitude = amplitude;
      },
      onDataMessage: (message) => {
        // Append streamed "custom" messages to the code window and keep it scrolled to the bottom
        if (message.type === "custom") {
          this.codeWindow.innerHTML += message.content;
          this.codeWindow.scrollTop = this.codeWindow.scrollHeight;
        }
      },
    });
    this.spaceBarHeld = false;

    // Set up UI event listeners
    this._setupEventListeners();

    // Start the session
    this.startSession();
  }
  /**
   * Sets up DOM event listeners
   * @private
   */
  _setupEventListeners() {
    // Touch events for mobile push-to-talk
    this.micContainer.addEventListener("touchstart", async (event) => {
      event.preventDefault();
      await this.client.startSpeaking();
      this._updateMicUI(true);
    });

    this.micContainer.addEventListener("touchend", async (event) => {
      event.preventDefault();
      await this.client.stopSpeaking();
      this._updateMicUI(false);
    });

    // Keyboard events for spacebar push-to-talk on desktop
    // (add mousedown/mouseup handlers here if you also want click-and-hold)
    document.addEventListener("keydown", async (event) => {
      if (event.key !== " ") return;
      event.preventDefault(); // keep the page from scrolling while space is held
      if (!this.spaceBarHeld) {
        this.spaceBarHeld = true;
        await this.client.startSpeaking();
        this._updateMicUI(true);
      }
    });

    document.addEventListener("keyup", async (event) => {
      if (event.key === " " && this.spaceBarHeld) {
        this.spaceBarHeld = false;
        await this.client.stopSpeaking();
        this._updateMicUI(false);
      }
    });
  }
  /**
   * Updates the microphone UI based on speaking state
   * @param {boolean} isSpeaking - Whether the user is speaking
   * @private
   */
  _updateMicUI(isSpeaking) {
    if (isSpeaking) {
      this.micContainer.classList.remove("opacity-25");
      this.micContainer.classList.add("opacity-90");
    } else {
      this.micContainer.classList.remove("opacity-90");
      this.micContainer.classList.add("opacity-25");
    }
  }
  /**
   * Starts a new session
   * @async
   */
  async startSession() {
    try {
      await this.client.connect();
      this.speechWidget.start();
    } catch (error) {
      console.error("Error starting session:", error);
    }
  }
  /**
   * Updates the prompt for the current session
   * @param {string} prompt - The new prompt to use
   */
  updatePrompt(prompt) {
    this.client.updatePrompt(prompt);
  }
}
// Initialize the ChatSession for vanilla JS usage
const chatSession = new ChatSession(
  "mic-container",
  "code-window",
  "waveCanvas",
  "mic-level"
);
// Expose chatSession to the global scope for debugging or additional interactions if necessary
window.chatSession = chatSession;
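For this example to run, the page needs elements whose IDs match the arguments passed to ChatSession. The markup below is a minimal, hypothetical sketch: only the IDs matter, and the opacity-25/opacity-90 classes toggled in _updateMicUI are assumed to come from your own CSS (or a utility framework such as Tailwind).
<!-- Hypothetical markup; only the IDs must match the ChatSession arguments above -->
<button id="mic-container" class="opacity-25">Hold to talk</button>
<canvas id="waveCanvas" width="320" height="80"></canvas>
<canvas id="mic-level" width="320" height="16"></canvas>
<pre id="code-window"></pre>
Because chatSession is attached to window, you can also experiment from the browser console, for example by calling window.chatSession.updatePrompt("...") to change the agent's prompt mid-session.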