
FFmpeg not detecting input streams for RTMP live recording

0 votes
0 answers
39 views
I am getting this type of error when FFmpeg opens the output file rtmp://localhost:1935/live/stream:

FFmpeg stderr: [out#0/flv @ 0x600003b843c0] No explicit maps, mapping streams automatically...
FFmpeg stderr: Output #0, flv, to 'rtmp://localhost:1935/live/stream':
[out#0/flv @ 0x600003b843c0] Output file does not contain any stream
FFmpeg stderr: Error opening output file rtmp://localhost:1935/live/stream.
Error opening output files: Invalid argument
FFmpeg process closed with code 234
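As far as I can tell from the log, FFmpeg treats rtmp://localhost:1935/live/stream as an output but never sees a declared input, so it has no streams to map into it. For comparison, this is a rough sketch of the argument shape I believe FFmpeg expects when it reads piped data from stdin and publishes to RTMP; the codec and container choices here are placeholders, not my actual settings:

import { spawn } from "child_process";

// Illustrative only: FFmpeg applies options to the next -i (input) or to the
// output URL that follows them. An input has to be declared (here stdin via
// -i pipe:0) before the output, otherwise the output has no streams to map.
const args = [
    "-f", "flv",                           // assumed container of the piped data
    "-i", "pipe:0",                        // read the input from stdin
    "-c:v", "libx264",                     // output video codec (placeholder)
    "-c:a", "aac",                         // output audio codec (placeholder)
    "-f", "flv",                           // RTMP publishing uses an FLV-muxed output
    "rtmp://localhost:1935/live/stream",
];
const proc = spawn("ffmpeg", args);
proc.stderr.on("data", (chunk: Buffer) => console.error(chunk.toString()));

My current recording service, which produces the error above, is this: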
import { spawn, ChildProcessWithoutNullStreams } from "child_process";
import fs from "fs";
import path from "path";
import ffmpeg from 'fluent-ffmpeg';


class RecordingService {
    private ffmpegProcess: any = null; //ffmpeg.FfmpegCommand | null = null;
    private readonly recordingsDir: string = "recordings";
    private outputFile: string = "";
    private readonly streamUrl: string = "rtmp://localhost:1935/live/stream";
    private readonly hlsOutputDir: string = "/usr/local/nginx/html/hls";

    constructor() {
        if (!fs.existsSync(this.recordingsDir)) {
            fs.mkdirSync(this.recordingsDir, { recursive: true });
        }
    }

    async startRecording(data: any, socket: any): Promise<void> {
        console.log("Starting FFmpeg recording...", JSON.stringify(data, null, 2));

        this.outputFile = path.join(this.recordingsDir, `recording_${Date.now()}.mp4`);
        const hlsOutput = path.join(this.hlsOutputDir, "live.m3u8");
        console.log("🚀 ~ RecordingService ~ startRecording ~ hlsOutput:", hlsOutput);

        const audioTrack = data?.stream?._tracks.find((track: any) => track.kind === "audio");
        const videoTrack = data?.stream?._tracks.find((track: any) => track.kind === "video");

        if (!audioTrack || !videoTrack) {
            console.error("Audio or video track missing.");
            return;
        }

        const audioDevice = audioTrack._settings.deviceId;
        const videoDevice = videoTrack._settings.deviceId;
        console.log("🚀 ~ RecordingService ~ startRecording ~ audioDevice:", audioDevice);
        console.log("🚀 ~ RecordingService ~ startRecording ~ videoDevice:", videoDevice);

        const ffmpegArgs = [
            '-loglevel', 'debug',
            // '-f', 'pipe:0',  // Use 'avfoundation' for macOS
            '-f', 'flv', this.streamUrl,
            "-an",
            // '-i', `${videoDevice}:${audioDevice}`,  // Combined video and audio input (using device IDs)
            '-pix_fmt', 'uyvy422',
            '-acodec', 'aac',
            '-vcodec', 'libx264',
            '-b:v', '1000k',
            '-preset', 'ultrafast',
            '-tune', 'zerolatency',
            '-f', 'mp4',
            this.outputFile
        ];
        this.ffmpegProcess = spawn('ffmpeg', ffmpegArgs);
        console.log(`Recording started. Saving to: ${this.outputFile}`);

        // Handle FFmpeg process events
        this.ffmpegProcess.stdout.on('data', (data: Buffer) => {
            console.log('FFmpeg stdout:', data.toString());
        });

        this.ffmpegProcess.stderr.on('data', (data: Buffer) => {
            console.error('FFmpeg stderr:', data.toString());
        });

        this.ffmpegProcess.on('close', (code: number) => {
            console.log(`FFmpeg process closed with code ${code}`);
        });

        socket.on('data', (streamData: Buffer) => {
            if (this.ffmpegProcess) {
                console.log(" Write the stream data to ffmpeg's stdin......");
                this.ffmpegProcess.stdin.write(streamData);  // Write the stream data to ffmpeg's stdin
            } else {
                console.error('FFmpeg process is not available.');
            }
        });

        socket.on('end', () => {
            if (this.ffmpegProcess) {
                this.ffmpegProcess.stdin.end();  // Properly close the input stream
            }
        });

        console.log(`Recording started. Saving to: ${this.outputFile}`);
    }

    async stopRecording(): Promise<void> {
        console.log("Stopping recording...");
        if (this.ffmpegProcess) {
            console.log(1111111111)
            // If FFmpeg process is running, stop it
            this.ffmpegProcess.kill('SIGINT');
            console.log(22222222)
            await new Promise((resolve) => setTimeout(resolve, 2000));

            if (fs.existsSync(this.outputFile)) {
                console.log(3333333)
                console.log(`Recording saved: ${this.outputFile}`);
            } else {
                console.warn("Recording file not found after stopping!");
            }

            this.ffmpegProcess = null;
        }
    }

    getLatestRecording(): string | null {
        const files = fs.readdirSync(this.recordingsDir)
            .filter(file => file.endsWith(".mp4"))
            .sort((a, b) => fs.statSync(path.join(this.recordingsDir, b)).mtimeMs - fs.statSync(path.join(this.recordingsDir, a)).mtimeMs);

        return files.length > 0 ? path.join(this.recordingsDir, files[0]) : null;
    }
}
export default RecordingService;
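For reference, this is roughly how I wire the service up; the "./RecordingService" import path and the device IDs are placeholders, and the EventEmitter stands in for whatever socket emits 'data' and 'end' events with Buffer chunks:

import { EventEmitter } from "events";
import RecordingService from "./RecordingService";

const recorder = new RecordingService();
const socket = new EventEmitter();

// The shape mirrors what startRecording() reads: _tracks entries with a kind
// and a _settings.deviceId.
const data = {
    stream: {
        _tracks: [
            { kind: "audio", _settings: { deviceId: "0" } },
            { kind: "video", _settings: { deviceId: "1" } },
        ],
    },
};

recorder.startRecording(data, socket);

// Later: feed some chunks, then stop and look up the newest file.
socket.emit("data", Buffer.alloc(1024));
setTimeout(async () => {
    socket.emit("end");
    await recorder.stopRecording();
    console.log("Latest recording:", recorder.getLatestRecording());
}, 5000);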
Asked by gayatri Sutariya (1 rep)
Apr 1, 2025, 09:19 AM
Last activity: Apr 1, 2025, 09:33 AM