feat: Updated CLI implementation
src/cli.ts (new file, +372 lines)
@@ -0,0 +1,372 @@
#!/usr/bin/env node
|
||||
|
||||
/**
 * DASH/HLS Video Converter CLI
 *
 * Usage:
 * create-vod <input-video> [output-dir] [-r resolutions] [-c codec] [-f format] [-p poster-timecode]
 *
 * Example:
 * create-vod ./video.mp4 ./output -r 720,1080
 */
|
||||
|
||||
import { convertToDash, checkFFmpeg, checkMP4Box, getVideoMetadata, detectHardwareEncoders } from './index';
|
||||
import cliProgress from 'cli-progress';
|
||||
import { statSync } from 'node:fs';
|
||||
import { basename, extname } from 'node:path';
|
||||
import type { CodecType, StreamingFormat, QualitySettings, HardwareAccelerationOption } from './types';
|
||||
import { selectProfiles, createProfilesFromStrings } from './config/profiles';
|
||||
|
||||
// Parse arguments
|
||||
const args = process.argv.slice(2);
|
||||
let customProfiles: string[] | undefined;
|
||||
let posterTimecode: string | undefined;
|
||||
let codecType: CodecType = 'dual'; // Default to dual codec
|
||||
let formatType: StreamingFormat = 'both'; // Default to both formats (DASH + HLS)
|
||||
const positionalArgs: string[] = [];
|
||||
|
||||
// Quality settings
|
||||
let h264CQ: number | undefined;
|
||||
let h264CRF: number | undefined;
|
||||
let av1CQ: number | undefined;
|
||||
let av1CRF: number | undefined;
|
||||
let accelerator: HardwareAccelerationOption | undefined;
|
||||
|
||||
// First pass: extract flags and their values
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
if (args[i] === '-r' || args[i] === '--resolutions') {
|
||||
// Collect all arguments after -r until next flag or end
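// e.g. "-r 720 1080" and "-r 720,1080" both end up as ['720', '1080'] after the join/split below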
|
||||
const profilesArgs: string[] = [];
|
||||
for (let j = i + 1; j < args.length; j++) {
|
||||
// Stop if we hit another flag (starts with -)
|
||||
if (args[j].startsWith('-')) {
|
||||
break;
|
||||
}
|
||||
profilesArgs.push(args[j]);
|
||||
i = j; // Skip these args in main loop
|
||||
}
|
||||
|
||||
// Parse profiles
|
||||
const joinedArgs = profilesArgs.join(',');
|
||||
customProfiles = joinedArgs
|
||||
.split(/[,\s]+/) // Split by comma or whitespace
|
||||
.map(s => s.trim())
|
||||
.filter(s => s.length > 0);
|
||||
} else if (args[i] === '-p' || args[i] === '--poster') {
|
||||
posterTimecode = args[i + 1];
|
||||
i++; // Skip next arg
|
||||
} else if (args[i] === '-c' || args[i] === '--codec') {
|
||||
const codec = args[i + 1];
|
||||
if (codec === 'av1' || codec === 'h264' || codec === 'dual') {
|
||||
codecType = codec;
|
||||
} else {
|
||||
console.error(`❌ Invalid codec: ${codec}. Valid options: av1, h264, dual`);
|
||||
process.exit(1);
|
||||
}
|
||||
i++; // Skip next arg
|
||||
} else if (args[i] === '-f' || args[i] === '--format') {
|
||||
const format = args[i + 1];
|
||||
if (format === 'dash' || format === 'hls' || format === 'both') {
|
||||
formatType = format;
|
||||
} else {
|
||||
console.error(`❌ Invalid format: ${format}. Valid options: dash, hls, both`);
|
||||
process.exit(1);
|
||||
}
|
||||
i++; // Skip next arg
|
||||
} else if (args[i] === '--h264-cq') {
|
||||
h264CQ = parseInt(args[i + 1]);
|
||||
if (isNaN(h264CQ) || h264CQ < 0 || h264CQ > 51) {
|
||||
console.error(`❌ Invalid H.264 CQ value: ${args[i + 1]}. Must be 0-51`);
|
||||
process.exit(1);
|
||||
}
|
||||
i++; // Skip next arg
|
||||
} else if (args[i] === '--h264-crf') {
|
||||
h264CRF = parseInt(args[i + 1]);
|
||||
if (isNaN(h264CRF) || h264CRF < 0 || h264CRF > 51) {
|
||||
console.error(`❌ Invalid H.264 CRF value: ${args[i + 1]}. Must be 0-51`);
|
||||
process.exit(1);
|
||||
}
|
||||
i++; // Skip next arg
|
||||
} else if (args[i] === '--av1-cq') {
|
||||
av1CQ = parseInt(args[i + 1]);
|
||||
if (isNaN(av1CQ) || av1CQ < 0 || av1CQ > 51) {
|
||||
console.error(`❌ Invalid AV1 CQ value: ${args[i + 1]}. Must be 0-51`);
|
||||
process.exit(1);
|
||||
}
|
||||
i++; // Skip next arg
|
||||
} else if (args[i] === '--av1-crf') {
|
||||
av1CRF = parseInt(args[i + 1]);
|
||||
if (isNaN(av1CRF) || av1CRF < 0 || av1CRF > 63) {
|
||||
console.error(`❌ Invalid AV1 CRF value: ${args[i + 1]}. Must be 0-63`);
|
||||
process.exit(1);
|
||||
}
|
||||
i++; // Skip next arg
|
||||
} else if (args[i] === '--accel' || args[i] === '--hardware') {
|
||||
const acc = args[i + 1];
|
||||
const allowed: HardwareAccelerationOption[] = ['auto', 'nvenc', 'qsv', 'amf', 'cpu'];
|
||||
if (!allowed.includes(acc as HardwareAccelerationOption)) {
|
||||
console.error(`❌ Invalid accelerator: ${acc}. Valid: auto, nvenc, qsv, amf, cpu`);
|
||||
process.exit(1);
|
||||
}
|
||||
accelerator = acc as HardwareAccelerationOption;
|
||||
i++;
|
||||
} else if (!args[i].startsWith('-')) {
|
||||
// Positional argument
|
||||
positionalArgs.push(args[i]);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract positional arguments
|
||||
const input = positionalArgs[0];
|
||||
const outputDir = positionalArgs[1] || '.'; // Default to the current directory
|
||||
|
||||
if (!input) {
|
||||
console.error('❌ Usage: create-vod <input-video> [output-dir] [options]');
|
||||
console.error('\nOptions:');
|
||||
console.error(' -r, --resolutions Video resolutions (e.g., 360,480,720 or 720@60,1080@60)');
|
||||
console.error(' -c, --codec Video codec: av1, h264, or dual (default: dual)');
|
||||
console.error(' -f, --format Streaming format: dash, hls, or both (default: both)');
|
||||
console.error(' -p, --poster Poster timecode (e.g., 00:00:05 or 10)');
|
||||
console.error(' --accel <type> Hardware accelerator: auto|nvenc|qsv|amf|cpu (default: auto)');
|
||||
console.error('\nQuality Options (override defaults):');
|
||||
console.error(' --h264-cq <value> H.264 GPU CQ value (0-51, lower = better, default: auto)');
|
||||
console.error(' --h264-crf <value> H.264 CPU CRF value (0-51, lower = better, default: auto)');
|
||||
console.error(' --av1-cq <value> AV1 GPU CQ value (0-51, lower = better, default: auto)');
|
||||
console.error(' --av1-crf <value> AV1 CPU CRF value (0-63, lower = better, default: auto)');
|
||||
console.error('\nExamples:');
|
||||
console.error(' create-vod video.mp4');
|
||||
console.error(' create-vod video.mp4 ./output');
|
||||
console.error(' create-vod video.mp4 -r 360,480,720');
|
||||
console.error(' create-vod video.mp4 -c av1 --av1-cq 40');
|
||||
console.error(' create-vod video.mp4 -c dual --h264-cq 30 --av1-cq 39');
|
||||
console.error(' create-vod video.mp4 -f hls');
|
||||
console.error(' create-vod video.mp4 -c dual -f both');
|
||||
console.error(' create-vod video.mp4 -r 720@60,1080@60,2160@60 -c av1 -f dash');
|
||||
console.error(' create-vod video.mp4 -p 00:00:05');
|
||||
console.error(' create-vod video.mp4 ./output -r 720,1080 -c dual -f both -p 10 --h264-cq 28 --av1-cq 37');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log('🔍 Checking system...\n');
|
||||
|
||||
const hasFFmpeg = await checkFFmpeg();
|
||||
const hasMP4Box = await checkMP4Box();
|
||||
const hwEncoders = await detectHardwareEncoders();
|
||||
|
||||
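// Priority for picking the best detected accelerator (higher wins); mirrors ACCEL_PRIORITY in src/core/converter.ts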
const accelPriority: Record<string, number> = {
|
||||
nvenc: 100,
|
||||
qsv: 90,
|
||||
amf: 80,
|
||||
vaapi: 70,
|
||||
videotoolbox: 65,
|
||||
v4l2: 60
|
||||
};
|
||||
|
||||
const bestAccel = hwEncoders
|
||||
.slice()
|
||||
.sort((a, b) => (accelPriority[b.accelerator] || 0) - (accelPriority[a.accelerator] || 0))[0];
|
||||
|
||||
console.log(`FFmpeg: ${hasFFmpeg ? '✅' : '❌'}`);
|
||||
console.log(`MP4Box: ${hasMP4Box ? '✅' : '❌'}`);
|
||||
const accelList = Array.from(new Set(hwEncoders.map(e => e.accelerator.toUpperCase())));
|
||||
const bestAccelName = bestAccel ? bestAccel.accelerator.toUpperCase() : undefined;
|
||||
const accelRest = accelList.filter(name => name !== bestAccelName);
|
||||
const accelLabel = bestAccelName
|
||||
? `✅ ${bestAccelName}${accelRest.length > 0 ? ` (${accelRest.join(', ')})` : ''}`
|
||||
: '❌';
|
||||
console.log(`Hardware: ${accelLabel}`);
|
||||
console.log('');
|
||||
|
||||
if (!hasFFmpeg) {
|
||||
console.error('❌ FFmpeg not found. Please install FFmpeg first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!hasMP4Box) {
|
||||
console.error('❌ MP4Box not found. Please install: sudo pacman -S gpac');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Validate codec selection
|
||||
const hasAv1Hardware = hwEncoders.some(item => item.av1Encoder);
|
||||
|
||||
if ((codecType === 'av1' || codecType === 'dual') && !hasAv1Hardware) {
|
||||
console.error(`⚠️ Warning: AV1 encoding requested but no hardware AV1 encoder found.`);
|
||||
console.error(` CPU-based AV1 encoding (libsvtav1) will be VERY slow.`);
|
||||
console.error(` Consider using --codec h264 for faster encoding.\n`);
|
||||
}
|
||||
|
||||
// Validate HLS requires H.264
|
||||
if ((formatType === 'hls' || formatType === 'both') && codecType === 'av1') {
|
||||
console.error(`❌ Error: HLS format requires H.264 codec for Safari/iOS compatibility.`);
|
||||
console.error(` Please use --codec h264 or --codec dual with --format hls\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Get video metadata and file size
|
||||
console.log('📊 Analyzing video...\n');
|
||||
const metadata = await getVideoMetadata(input);
|
||||
const fileStats = statSync(input);
|
||||
const fileSizeMB = (fileStats.size / (1024 * 1024)).toFixed(2);
|
||||
|
||||
console.log('📹 Video Information:');
|
||||
console.log(` File: ${input}`);
|
||||
console.log(` Size: ${fileSizeMB} MB`);
|
||||
console.log(` Resolution: ${metadata.width}x${metadata.height}`);
|
||||
console.log(` FPS: ${metadata.fps.toFixed(2)}`);
|
||||
console.log(` Duration: ${Math.floor(metadata.duration / 60)}m ${Math.floor(metadata.duration % 60)}s`);
|
||||
console.log(` Codec: ${metadata.codec}`);
|
||||
if (metadata.videoBitrate) {
|
||||
console.log(` Video Bitrate: ${(metadata.videoBitrate / 1000).toFixed(2)} Mbps`);
|
||||
}
|
||||
if (metadata.audioBitrate) {
|
||||
console.log(` Audio Bitrate: ${metadata.audioBitrate} kbps`);
|
||||
}
|
||||
|
||||
// Pre-calc profiles for display (match internal selection logic)
|
||||
let displayProfiles: string[] = [];
|
||||
if (customProfiles && customProfiles.length > 0) {
|
||||
const profileResult = createProfilesFromStrings(
|
||||
customProfiles,
|
||||
metadata.width,
|
||||
metadata.height,
|
||||
metadata.fps,
|
||||
metadata.videoBitrate
|
||||
);
|
||||
|
||||
if (profileResult.errors.length > 0) {
|
||||
console.error('\n❌ Profile errors:');
|
||||
profileResult.errors.forEach(err => console.error(` - ${err}`));
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (profileResult.warnings.length > 0) {
|
||||
console.warn('\n⚠️ Profile warnings:');
|
||||
profileResult.warnings.forEach(warn => console.warn(` - ${warn}`));
|
||||
}
|
||||
|
||||
displayProfiles = profileResult.profiles.map(p => p.name);
|
||||
} else {
|
||||
const autoProfiles = selectProfiles(
|
||||
metadata.width,
|
||||
metadata.height,
|
||||
metadata.fps,
|
||||
metadata.videoBitrate
|
||||
);
|
||||
displayProfiles = autoProfiles.map(p => p.name);
|
||||
}
|
||||
|
||||
const manifestDesc =
|
||||
formatType === 'both' ? 'DASH (manifest.mpd), HLS (master.m3u8)' :
|
||||
formatType === 'dash' ? 'DASH (manifest.mpd)' : 'HLS (master.m3u8)';
|
||||
|
||||
const thumbnailsPlanned = true;
|
||||
const posterPlanned = posterTimecode || '00:00:00';
|
||||
|
||||
console.log('\n📦 Parameters:');
|
||||
console.log(` Input: ${input}`);
|
||||
console.log(` Output: ${outputDir}`);
|
||||
console.log(` Codec: ${codecType}${codecType === 'dual' ? ' (AV1 + H.264)' : ''}`);
|
||||
console.log(` Profiles: ${displayProfiles.join(', ')}`);
|
||||
console.log(` Manifests: ${manifestDesc}`);
|
||||
console.log(` Poster: ${posterPlanned} (will be generated)`);
|
||||
console.log(` Thumbnails: ${thumbnailsPlanned ? 'yes (with VTT)' : 'no'}`);
|
||||
console.log(` Accelerator: ${bestAccel ? bestAccel.accelerator.toUpperCase() : 'CPU'}`);
|
||||
|
||||
// Build quality settings if any are specified
|
||||
let quality: QualitySettings | undefined;
|
||||
if (h264CQ !== undefined || h264CRF !== undefined || av1CQ !== undefined || av1CRF !== undefined) {
|
||||
quality = {};
|
||||
|
||||
if (h264CQ !== undefined || h264CRF !== undefined) {
|
||||
quality.h264 = {};
|
||||
if (h264CQ !== undefined) quality.h264.cq = h264CQ;
|
||||
if (h264CRF !== undefined) quality.h264.crf = h264CRF;
|
||||
console.log(`🎚️ H.264 Quality: ${h264CQ !== undefined ? `CQ ${h264CQ}` : ''}${h264CRF !== undefined ? ` CRF ${h264CRF}` : ''}`);
|
||||
}
|
||||
|
||||
if (av1CQ !== undefined || av1CRF !== undefined) {
|
||||
quality.av1 = {};
|
||||
if (av1CQ !== undefined) quality.av1.cq = av1CQ;
|
||||
if (av1CRF !== undefined) quality.av1.crf = av1CRF;
|
||||
console.log(`🎚️ AV1 Quality: ${av1CQ !== undefined ? `CQ ${av1CQ}` : ''}${av1CRF !== undefined ? ` CRF ${av1CRF}` : ''}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n🚀 Starting conversion...\n');
|
||||
|
||||
// Create multibar container
|
||||
const multibar = new cliProgress.MultiBar({
|
||||
format: '{stage} | {bar} | {percentage}% | {name}',
|
||||
barCompleteChar: '█',
|
||||
barIncompleteChar: '░',
|
||||
hideCursor: true,
|
||||
clearOnComplete: false,
|
||||
stopOnComplete: true
|
||||
}, cliProgress.Presets.shades_classic);
|
||||
|
||||
// Track progress bars for each profile
|
||||
const bars: Record<string, any> = {};
|
||||
let overallBar: any = null;
|
||||
|
||||
try {
|
||||
const result = await convertToDash({
|
||||
input,
|
||||
outputDir,
|
||||
customProfiles,
|
||||
posterTimecode,
|
||||
codec: codecType,
|
||||
format: formatType,
|
||||
segmentDuration: 2,
|
||||
hardwareAccelerator: accelerator,
|
||||
quality,
|
||||
generateThumbnails: true,
|
||||
generatePoster: true,
|
||||
parallel: true,
|
||||
onProgress: (progress) => {
|
||||
const stageName = progress.stage === 'encoding' ? 'Encoding' :
|
||||
progress.stage === 'thumbnails' ? 'Thumbnails' :
|
||||
progress.stage === 'manifest' ? 'Manifest' :
|
||||
progress.stage === 'analyzing' ? 'Analyzing' : 'Complete';
|
||||
|
||||
// Stage 1: Encoding - show individual profile bars
|
||||
if (progress.stage === 'encoding' && progress.currentProfile) {
|
||||
if (!bars[progress.currentProfile]) {
|
||||
bars[progress.currentProfile] = multibar.create(100, 0, {
|
||||
stage: 'Encode',
|
||||
name: progress.currentProfile
|
||||
});
|
||||
}
|
||||
// Use profilePercent (0-100) for individual bars, not overall percent
|
||||
const profileProgress = progress.profilePercent ?? progress.percent;
|
||||
bars[progress.currentProfile].update(profileProgress, {
|
||||
stage: 'Encode',
|
||||
name: progress.currentProfile
|
||||
});
|
||||
}
|
||||
|
||||
// Overall progress bar
|
||||
if (!overallBar) {
|
||||
overallBar = multibar.create(100, 0, {
|
||||
stage: stageName,
|
||||
name: 'Overall'
|
||||
});
|
||||
}
|
||||
|
||||
overallBar.update(progress.percent, {
|
||||
stage: stageName,
|
||||
name: progress.message || 'Overall'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
multibar.stop();
|
||||
|
||||
console.log('\n✅ Conversion completed successfully!\n');
|
||||
|
||||
} catch (error) {
|
||||
multibar.stop();
|
||||
console.error('\n\n❌ Error during conversion:');
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
}
|
||||
src/config/profiles.ts (new file, +274 lines)
@@ -0,0 +1,274 @@
import type { VideoProfile } from '../types';
|
||||
|
||||
/**
|
||||
* Get optimal BPP (Bits Per Pixel) based on resolution
|
||||
* Lower resolutions need higher BPP for good quality
|
||||
* Higher resolutions can use lower BPP due to more pixels
|
||||
*/
|
||||
function getBPP(width: number, height: number): number {
|
||||
const pixels = width * height;
|
||||
|
||||
if (pixels <= 640 * 360) return 0.08; // 360p - higher quality needed
|
||||
if (pixels <= 854 * 480) return 0.075; // 480p
|
||||
if (pixels <= 1280 * 720) return 0.07; // 720p
|
||||
if (pixels <= 1920 * 1080) return 0.065; // 1080p
|
||||
if (pixels <= 2560 * 1440) return 0.06; // 1440p (2K)
|
||||
return 0.055; // 4K - lower BPP, still good quality
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate optimal video bitrate based on resolution and FPS
|
||||
* Formula: width × height × fps × bpp
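 * Example: 1920x1080 at 30 fps with BPP 0.065 → round(1920*1080*30*0.065/1000) = 4044 → "4044k"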
|
||||
*/
|
||||
function calculateBitrate(
|
||||
width: number,
|
||||
height: number,
|
||||
fps: number = 30,
|
||||
maxBitrate?: number
|
||||
): string {
|
||||
const bpp = getBPP(width, height);
|
||||
let bitrate = Math.round((width * height * fps * bpp) / 1000);
|
||||
|
||||
// Don't exceed source bitrate (no point in upscaling quality)
|
||||
if (maxBitrate && bitrate > maxBitrate) {
|
||||
bitrate = maxBitrate;
|
||||
}
|
||||
|
||||
return `${bitrate}k`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Default video quality profiles for 30 FPS
|
||||
*/
|
||||
export const DEFAULT_PROFILES: VideoProfile[] = [
|
||||
{
|
||||
name: '360p',
|
||||
width: 640,
|
||||
height: 360,
|
||||
videoBitrate: calculateBitrate(640, 360, 30),
|
||||
audioBitrate: '192k'
|
||||
},
|
||||
{
|
||||
name: '480p',
|
||||
width: 854,
|
||||
height: 480,
|
||||
videoBitrate: calculateBitrate(854, 480, 30),
|
||||
audioBitrate: '192k'
|
||||
},
|
||||
{
|
||||
name: '720p',
|
||||
width: 1280,
|
||||
height: 720,
|
||||
videoBitrate: calculateBitrate(1280, 720, 30),
|
||||
audioBitrate: '192k'
|
||||
},
|
||||
{
|
||||
name: '1080p',
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
videoBitrate: calculateBitrate(1920, 1080, 30),
|
||||
audioBitrate: '256k'
|
||||
},
|
||||
{
|
||||
name: '1440p',
|
||||
width: 2560,
|
||||
height: 1440,
|
||||
videoBitrate: calculateBitrate(2560, 1440, 30),
|
||||
audioBitrate: '256k'
|
||||
},
|
||||
{
|
||||
name: '2160p',
|
||||
width: 3840,
|
||||
height: 2160,
|
||||
videoBitrate: calculateBitrate(3840, 2160, 30),
|
||||
audioBitrate: '256k'
|
||||
}
|
||||
];
|
||||
|
||||
/**
|
||||
* Select appropriate profiles based on input video resolution
|
||||
* Only creates profiles that are equal to or smaller than input resolution
|
||||
* Always generates 30 FPS profiles by default
|
||||
* For high FPS (>30), user must explicitly specify in customProfiles
|
||||
*/
|
||||
export function selectProfiles(
|
||||
inputWidth: number,
|
||||
inputHeight: number,
|
||||
inputFPS: number = 30,
|
||||
sourceBitrate?: number
|
||||
): VideoProfile[] {
|
||||
const profiles: VideoProfile[] = [];
|
||||
|
||||
// Standard 30 FPS profiles (always created)
|
||||
const baseProfiles = DEFAULT_PROFILES.filter(profile => {
|
||||
return profile.width <= inputWidth && profile.height <= inputHeight;
|
||||
});
|
||||
|
||||
// Add standard 30fps profiles with bitrate limit
|
||||
for (const profile of baseProfiles) {
|
||||
profiles.push({
|
||||
...profile,
|
||||
videoBitrate: calculateBitrate(profile.width, profile.height, 30, sourceBitrate),
|
||||
fps: 30
|
||||
});
|
||||
}
|
||||
|
||||
return profiles;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create high FPS profile variant
|
||||
* Used for creating 60fps, 90fps, 120fps versions
|
||||
*/
|
||||
export function createHighFPSProfile(
|
||||
baseProfile: VideoProfile,
|
||||
fps: number,
|
||||
maxBitrate?: number
|
||||
): VideoProfile {
|
||||
return {
|
||||
...baseProfile,
|
||||
name: `${baseProfile.name}-${fps}`,
|
||||
videoBitrate: calculateBitrate(baseProfile.width, baseProfile.height, fps, maxBitrate),
|
||||
fps
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse profile string into resolution and FPS
|
||||
* Examples:
|
||||
* '360' => { resolution: '360p', fps: 30 }
|
||||
* '720@60' => { resolution: '720p', fps: 60 }
|
||||
* '1080-60' => { resolution: '1080p', fps: 60 }
|
||||
* '360p', '720p@60' also supported (with 'p')
|
||||
*/
|
||||
function parseProfileString(profileStr: string): { resolution: string; fps: number } | null {
|
||||
const trimmed = profileStr.trim();
|
||||
|
||||
// Match patterns: 360, 720@60, 1080-60, 360p, 720p@60, 1080p-60
|
||||
const match = trimmed.match(/^(\d+)p?(?:[@-](\d+))?$/i);
|
||||
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const resolution = match[1] + 'p'; // Always add 'p'
|
||||
const fps = match[2] ? parseInt(match[2]) : 30;
|
||||
|
||||
return { resolution, fps };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get profile by resolution name and FPS
|
||||
* Returns VideoProfile or null if not found
|
||||
*/
|
||||
export function getProfileByName(
|
||||
resolution: string,
|
||||
fps: number = 30,
|
||||
maxBitrate?: number
|
||||
): VideoProfile | null {
|
||||
const baseProfile = DEFAULT_PROFILES.find(p => p.name === resolution);
|
||||
|
||||
if (!baseProfile) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (fps === 30) {
|
||||
return {
|
||||
...baseProfile,
|
||||
videoBitrate: calculateBitrate(baseProfile.width, baseProfile.height, 30, maxBitrate),
|
||||
fps: 30
|
||||
};
|
||||
}
|
||||
|
||||
return createHighFPSProfile(baseProfile, fps, maxBitrate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate if profile can be created from source
|
||||
* Returns object with error, warning, and adjusted FPS
|
||||
*/
|
||||
export function validateProfile(
|
||||
profileStr: string,
|
||||
sourceWidth: number,
|
||||
sourceHeight: number,
|
||||
sourceFPS: number
|
||||
): { error?: string; warning?: string; adjustedFps?: number } {
|
||||
const parsed = parseProfileString(profileStr);
|
||||
|
||||
if (!parsed) {
|
||||
return { error: `Invalid profile format: ${profileStr}. Use format like: 360, 720@60, 1080-60` };
|
||||
}
|
||||
|
||||
const profile = getProfileByName(parsed.resolution, parsed.fps);
|
||||
|
||||
if (!profile) {
|
||||
return { error: `Unknown resolution: ${parsed.resolution}. Available: 360, 480, 720, 1080, 1440, 2160` };
|
||||
}
|
||||
|
||||
// Check if source supports this resolution
|
||||
if (profile.width > sourceWidth || profile.height > sourceHeight) {
|
||||
return { error: `Source resolution (${sourceWidth}x${sourceHeight}) is lower than ${profileStr} (${profile.width}x${profile.height})` };
|
||||
}
|
||||
|
||||
// Check if requested FPS exceeds source FPS
|
||||
const MAX_FPS = 120;
|
||||
let adjustedFps = parsed.fps;
|
||||
let warning: string | undefined;
|
||||
|
||||
if (parsed.fps > sourceFPS) {
|
||||
// Cap to source FPS (but not more than MAX_FPS)
|
||||
adjustedFps = Math.min(sourceFPS, MAX_FPS);
|
||||
warning = `Requested ${parsed.fps} FPS in ${profileStr}, but source is ${sourceFPS} FPS. Using ${adjustedFps} FPS instead`;
|
||||
} else if (parsed.fps > MAX_FPS) {
|
||||
// Cap to MAX_FPS
|
||||
adjustedFps = MAX_FPS;
|
||||
warning = `Requested ${parsed.fps} FPS in ${profileStr} exceeds maximum ${MAX_FPS} FPS. Using ${adjustedFps} FPS instead`;
|
||||
}
|
||||
|
||||
return warning ? { warning, adjustedFps } : {}; // Valid
|
||||
}
|
||||
|
||||
/**
|
||||
* Create profiles from custom string list
|
||||
* Example: ['360p', '720p@60', '1080p'] => VideoProfile[]
|
||||
*/
|
||||
export function createProfilesFromStrings(
|
||||
profileStrings: string[],
|
||||
sourceWidth: number,
|
||||
sourceHeight: number,
|
||||
sourceFPS: number,
|
||||
sourceBitrate?: number
|
||||
): { profiles: VideoProfile[]; errors: string[]; warnings: string[] } {
|
||||
const profiles: VideoProfile[] = [];
|
||||
const errors: string[] = [];
|
||||
const warnings: string[] = [];
|
||||
|
||||
for (const profileStr of profileStrings) {
|
||||
// Validate
|
||||
const result = validateProfile(profileStr, sourceWidth, sourceHeight, sourceFPS);
|
||||
|
||||
if (result.error) {
|
||||
errors.push(result.error);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (result.warning) {
|
||||
warnings.push(result.warning);
|
||||
}
|
||||
|
||||
// Parse and create
|
||||
const parsed = parseProfileString(profileStr);
|
||||
if (!parsed) continue; // Already validated, shouldn't happen
|
||||
|
||||
// Use adjusted FPS if available (when requested FPS > source FPS)
|
||||
const targetFps = result.adjustedFps !== undefined ? result.adjustedFps : parsed.fps;
|
||||
|
||||
const profile = getProfileByName(parsed.resolution, targetFps, sourceBitrate);
|
||||
if (profile) {
|
||||
profiles.push(profile);
|
||||
}
|
||||
}
|
||||
|
||||
return { profiles, errors, warnings };
|
||||
}
|
||||
|
||||
src/core/converter.ts (new file, +527 lines)
@@ -0,0 +1,527 @@
import { join, basename, extname } from 'node:path';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import { rm } from 'node:fs/promises';
|
||||
import type {
|
||||
DashConvertOptions,
|
||||
DashConvertResult,
|
||||
VideoProfile,
|
||||
ThumbnailConfig,
|
||||
ConversionProgress,
|
||||
CodecType,
|
||||
StreamingFormat,
|
||||
HardwareAccelerationOption,
|
||||
HardwareAccelerator,
|
||||
HardwareEncoderInfo
|
||||
} from '../types';
|
||||
import {
|
||||
checkFFmpeg,
|
||||
checkMP4Box,
|
||||
getVideoMetadata,
|
||||
ensureDir,
|
||||
setLogFile,
|
||||
detectHardwareEncoders
|
||||
} from '../utils';
|
||||
import { selectProfiles, createProfilesFromStrings } from '../config/profiles';
|
||||
import { generateThumbnailSprite, generatePoster } from './thumbnails';
|
||||
import { encodeProfilesToMP4 } from './encoding';
|
||||
import { packageToFormats } from './packaging';
|
||||
|
||||
/**
|
||||
* Convert video to DASH/HLS formats with hardware acceleration (NVENC/QSV/AMF) when available
|
||||
* Two-stage approach: FFmpeg encoding → MP4Box packaging
|
||||
*/
|
||||
export async function convertToDash(
|
||||
options: DashConvertOptions
|
||||
): Promise<DashConvertResult> {
|
||||
const {
|
||||
input,
|
||||
outputDir,
|
||||
segmentDuration = 2,
|
||||
profiles: userProfiles,
|
||||
customProfiles,
|
||||
codec = 'dual',
|
||||
format = 'both',
|
||||
useNvenc,
|
||||
hardwareAccelerator,
|
||||
quality,
|
||||
generateThumbnails = true,
|
||||
thumbnailConfig = {},
|
||||
generatePoster: shouldGeneratePoster = true,
|
||||
posterTimecode = '00:00:00',
|
||||
parallel = true,
|
||||
onProgress
|
||||
} = options;
|
||||
|
||||
// Create unique temp directory
|
||||
const tempDir = join('/tmp', `dash-converter-${randomUUID()}`);
|
||||
await ensureDir(tempDir);
|
||||
|
||||
// Create video output directory and initialize logging
|
||||
const videoName = basename(input, extname(input));
|
||||
const videoOutputDir = join(outputDir, videoName);
|
||||
await ensureDir(videoOutputDir);
|
||||
|
||||
// Initialize log file
|
||||
const logFile = join(videoOutputDir, 'conversion.log');
|
||||
setLogFile(logFile);
|
||||
|
||||
// Write log header
|
||||
const { writeFile } = await import('node:fs/promises');
|
||||
const header = `===========================================
|
||||
DASH Conversion Log
|
||||
Started: ${new Date().toISOString()}
|
||||
Input: ${input}
|
||||
Output: ${videoOutputDir}
|
||||
Codec: ${codec}
|
||||
Format: ${format}
|
||||
===========================================\n`;
|
||||
await writeFile(logFile, header, 'utf-8');
|
||||
|
||||
try {
|
||||
return await convertToDashInternal(
|
||||
input,
|
||||
outputDir,
|
||||
tempDir,
|
||||
segmentDuration,
|
||||
userProfiles,
|
||||
customProfiles,
|
||||
codec,
|
||||
format,
|
||||
useNvenc,
|
||||
hardwareAccelerator,
|
||||
quality,
|
||||
generateThumbnails,
|
||||
thumbnailConfig,
|
||||
shouldGeneratePoster,
|
||||
posterTimecode,
|
||||
parallel,
|
||||
onProgress
|
||||
);
|
||||
} finally {
|
||||
// Write completion to log
|
||||
const { appendFile } = await import('node:fs/promises');
|
||||
try {
|
||||
await appendFile(logFile, `\nCompleted: ${new Date().toISOString()}\n`, 'utf-8');
|
||||
} catch (err) {
|
||||
// Ignore log write errors
|
||||
}
|
||||
|
||||
// Cleanup temp directory
|
||||
try {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
} catch (err) {
|
||||
console.warn(`Warning: Failed to cleanup temp directory: ${tempDir}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal conversion logic
|
||||
*/
|
||||
async function convertToDashInternal(
|
||||
input: string,
|
||||
outputDir: string,
|
||||
tempDir: string,
|
||||
segmentDuration: number,
|
||||
userProfiles: VideoProfile[] | undefined,
|
||||
customProfiles: string[] | undefined,
|
||||
codec: CodecType,
|
||||
format: StreamingFormat,
|
||||
useNvenc: boolean | undefined,
|
||||
hardwareAccelerator: HardwareAccelerationOption | undefined,
|
||||
quality: DashConvertOptions['quality'],
|
||||
generateThumbnails: boolean,
|
||||
thumbnailConfig: ThumbnailConfig,
|
||||
generatePosterFlag: boolean,
|
||||
posterTimecode: string,
|
||||
parallel: boolean,
|
||||
onProgress?: (progress: ConversionProgress) => void
|
||||
): Promise<DashConvertResult> {
|
||||
|
||||
// Validate dependencies
|
||||
if (!await checkFFmpeg()) {
|
||||
throw new Error('FFmpeg is not installed or not in PATH');
|
||||
}
|
||||
|
||||
if (!await checkMP4Box()) {
|
||||
throw new Error('MP4Box is not installed or not in PATH. Install gpac package.');
|
||||
}
|
||||
|
||||
// Report progress
|
||||
const reportProgress = (stage: ConversionProgress['stage'], percent: number, message?: string, currentProfile?: string) => {
|
||||
if (onProgress) {
|
||||
onProgress({ stage, percent, message, currentProfile });
|
||||
}
|
||||
};
|
||||
|
||||
reportProgress('analyzing', 0, 'Analyzing input video...');
|
||||
|
||||
// Get video metadata
|
||||
const metadata = await getVideoMetadata(input);
|
||||
const hasAudio = metadata.hasAudio;
|
||||
|
||||
// Determine hardware accelerator (auto by default)
|
||||
const preferredAccelerator: HardwareAccelerationOption =
|
||||
hardwareAccelerator && hardwareAccelerator !== 'auto'
|
||||
? hardwareAccelerator
|
||||
: useNvenc === true
|
||||
? 'nvenc'
|
||||
: useNvenc === false
|
||||
? 'cpu'
|
||||
: 'auto';
|
||||
|
||||
const hardwareEncoders = await detectHardwareEncoders();
|
||||
|
||||
const { selected, h264Encoder, av1Encoder, warnings: accelWarnings } = selectHardwareEncoders(
|
||||
hardwareEncoders,
|
||||
preferredAccelerator,
|
||||
codec
|
||||
);
|
||||
|
||||
if (accelWarnings.length > 0) {
|
||||
for (const warn of accelWarnings) {
|
||||
console.warn(`⚠️ ${warn}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Select profiles
|
||||
let profiles: VideoProfile[];
|
||||
|
||||
if (customProfiles && customProfiles.length > 0) {
|
||||
// User specified custom profiles via CLI
|
||||
const result = createProfilesFromStrings(
|
||||
customProfiles,
|
||||
metadata.width,
|
||||
metadata.height,
|
||||
metadata.fps,
|
||||
metadata.videoBitrate
|
||||
);
|
||||
|
||||
// Show errors if any
|
||||
if (result.errors.length > 0) {
|
||||
console.warn('\n❌ Profile errors:');
|
||||
for (const error of result.errors) {
|
||||
console.warn(` - ${error}`);
|
||||
}
|
||||
console.warn('');
|
||||
}
|
||||
|
||||
// Show warnings if any
|
||||
if (result.warnings.length > 0) {
|
||||
console.warn('\n⚠️ Profile warnings:');
|
||||
for (const warning of result.warnings) {
|
||||
console.warn(` - ${warning}`);
|
||||
}
|
||||
console.warn('');
|
||||
}
|
||||
|
||||
profiles = result.profiles;
|
||||
|
||||
if (profiles.length === 0) {
|
||||
throw new Error('No valid profiles found in custom list. Check errors above.');
|
||||
}
|
||||
} else if (userProfiles) {
|
||||
// Programmatic API usage
|
||||
profiles = userProfiles;
|
||||
} else {
|
||||
// Default: auto-select based on source
|
||||
profiles = selectProfiles(
|
||||
metadata.width,
|
||||
metadata.height,
|
||||
metadata.fps,
|
||||
metadata.videoBitrate
|
||||
);
|
||||
}
|
||||
|
||||
if (profiles.length === 0) {
|
||||
throw new Error('No suitable profiles found for input video resolution');
|
||||
}
|
||||
|
||||
// Create video name directory
|
||||
const inputBasename = basename(input, extname(input));
|
||||
const videoOutputDir = join(outputDir, inputBasename);
|
||||
|
||||
// Clean up previous conversion if exists
|
||||
try {
|
||||
await rm(videoOutputDir, { recursive: true, force: true });
|
||||
} catch (err) {
|
||||
// Directory might not exist, that's fine
|
||||
}
|
||||
|
||||
await ensureDir(videoOutputDir);
|
||||
|
||||
// Determine which codecs to use based on codec parameter
|
||||
const codecs: Array<{ type: 'h264' | 'av1'; codec: string; preset: string }> = [];
|
||||
|
||||
if (codec === 'h264' || codec === 'dual') {
|
||||
const h264Codec = h264Encoder || 'libx264';
|
||||
const h264Preset = resolvePresetForEncoder(h264Codec, 'h264');
|
||||
codecs.push({ type: 'h264', codec: h264Codec, preset: h264Preset });
|
||||
}
|
||||
|
||||
if (codec === 'av1' || codec === 'dual') {
|
||||
const av1Codec = av1Encoder || 'libsvtav1';
|
||||
const av1Preset = resolvePresetForEncoder(av1Codec, 'av1');
|
||||
codecs.push({ type: 'av1', codec: av1Codec, preset: av1Preset });
|
||||
}
|
||||
|
||||
const codecNames = codecs.map(c => c.type.toUpperCase()).join(' + ');
|
||||
const accelLabel = selected === 'cpu' ? 'CPU' : selected.toUpperCase();
|
||||
reportProgress('analyzing', 20, `Using ${codecNames} encoding (${accelLabel})`, undefined);
|
||||
|
||||
const maxConcurrent = selected === 'cpu' ? 2 : 3;
|
||||
|
||||
// STAGE 1: Encode profiles to MP4 for each codec (parallel - heavy work)
|
||||
const codecMP4Paths = new Map<'h264' | 'av1', Map<string, string>>();
|
||||
|
||||
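// Progress budget: encoding covers 25-65%, packaging 65-80%, thumbnails/poster 80-95%, finalization 95-100%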
for (let codecIndex = 0; codecIndex < codecs.length; codecIndex++) {
|
||||
const { type, codec: videoCodec, preset: codecPreset } = codecs[codecIndex];
|
||||
const codecProgress = codecIndex / codecs.length;
|
||||
const codecProgressRange = 1 / codecs.length;
|
||||
|
||||
reportProgress('encoding', 25 + codecProgress * 40, `Stage 1: Encoding ${type.toUpperCase()} (${profiles.length} profiles)...`);
|
||||
|
||||
// Get quality settings for this codec
|
||||
const codecQuality = type === 'h264' ? quality?.h264 : quality?.av1;
|
||||
|
||||
const tempMP4Paths = await encodeProfilesToMP4(
|
||||
input,
|
||||
tempDir,
|
||||
profiles,
|
||||
videoCodec,
|
||||
codecPreset,
|
||||
metadata.duration,
|
||||
segmentDuration,
|
||||
metadata.audioBitrate,
|
||||
parallel,
|
||||
maxConcurrent,
|
||||
type, // Pass codec type to differentiate output files
|
||||
codecQuality, // Pass quality settings (CQ/CRF)
|
||||
undefined, // optimizations - for future use
|
||||
(profileName, percent) => {
|
||||
const profileIndex = profiles.findIndex(p => p.name === profileName);
|
||||
const baseProgress = 25 + codecProgress * 40;
|
||||
const profileProgress = (percent / 100) * (40 * codecProgressRange / profiles.length);
|
||||
reportProgress('encoding', baseProgress + profileProgress, `Encoding ${type.toUpperCase()} ${profileName}...`, `${type}-${profileName}`);
|
||||
|
||||
// Also report individual profile progress
|
||||
if (onProgress) {
|
||||
onProgress({
|
||||
stage: 'encoding',
|
||||
percent: baseProgress + profileProgress,
|
||||
currentProfile: `${type}-${profileName}`,
|
||||
profilePercent: percent,
|
||||
message: `Encoding ${type.toUpperCase()} ${profileName}...`
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
codecMP4Paths.set(type, tempMP4Paths);
|
||||
}
|
||||
|
||||
reportProgress('encoding', 65, 'Stage 1 complete: All codecs and profiles encoded');
|
||||
|
||||
// STAGE 2: Package to segments and manifests (unified, no duplication)
|
||||
reportProgress('encoding', 70, `Stage 2: Creating segments and manifests...`);
|
||||
|
||||
const { manifestPath, hlsManifestPath } = await packageToFormats(
|
||||
codecMP4Paths,
|
||||
videoOutputDir,
|
||||
profiles,
|
||||
segmentDuration,
|
||||
codec,
|
||||
format,
|
||||
hasAudio
|
||||
);
|
||||
|
||||
// Collect all video paths from all codecs
|
||||
const videoPaths: string[] = [];
|
||||
for (const mp4Paths of codecMP4Paths.values()) {
|
||||
videoPaths.push(...Array.from(mp4Paths.values()));
|
||||
}
|
||||
|
||||
reportProgress('encoding', 80, 'Stage 2 complete: All formats packaged');
|
||||
|
||||
// Generate thumbnails
|
||||
let thumbnailSpritePath: string | undefined;
|
||||
let thumbnailVttPath: string | undefined;
|
||||
|
||||
if (generateThumbnails) {
|
||||
reportProgress('thumbnails', 80, 'Generating thumbnail sprites...');
|
||||
|
||||
const thumbConfig: Required<ThumbnailConfig> = {
|
||||
width: thumbnailConfig.width || 160,
|
||||
height: thumbnailConfig.height || 90,
|
||||
interval: thumbnailConfig.interval || 1, // 1 second by default
|
||||
columns: thumbnailConfig.columns || 10
|
||||
};
|
||||
|
||||
const thumbResult = await generateThumbnailSprite(
|
||||
input,
|
||||
videoOutputDir,
|
||||
metadata.duration,
|
||||
thumbConfig
|
||||
);
|
||||
|
||||
thumbnailSpritePath = thumbResult.spritePath;
|
||||
thumbnailVttPath = thumbResult.vttPath;
|
||||
|
||||
reportProgress('thumbnails', 90, 'Thumbnails generated');
|
||||
}
|
||||
|
||||
// Generate poster
|
||||
let posterPath: string | undefined;
|
||||
|
||||
if (generatePosterFlag) {
|
||||
reportProgress('thumbnails', 92, 'Generating poster image...');
|
||||
|
||||
posterPath = await generatePoster(
|
||||
input,
|
||||
videoOutputDir,
|
||||
posterTimecode
|
||||
);
|
||||
|
||||
reportProgress('thumbnails', 95, 'Poster generated');
|
||||
}
|
||||
|
||||
// Finalize
|
||||
reportProgress('manifest', 95, 'Finalizing...');
|
||||
|
||||
// Note: manifestPath/hlsManifestPath are already created by MP4Box in packageToDash/packageToHLS
|
||||
// No need for separate generateManifest function
|
||||
|
||||
reportProgress('complete', 100, 'Conversion complete!');
|
||||
|
||||
return {
|
||||
manifestPath,
|
||||
hlsManifestPath,
|
||||
videoPaths,
|
||||
thumbnailSpritePath,
|
||||
thumbnailVttPath,
|
||||
posterPath,
|
||||
duration: metadata.duration,
|
||||
profiles,
|
||||
usedNvenc: codecs.some(c => c.codec.includes('nvenc')),
|
||||
selectedAccelerator: selected,
|
||||
codecType: codec,
|
||||
format
|
||||
};
|
||||
}
|
||||
|
||||
const ACCEL_PRIORITY: Record<HardwareAccelerator, number> = {
|
||||
nvenc: 100,
|
||||
qsv: 90,
|
||||
amf: 80,
|
||||
vaapi: 70,
|
||||
videotoolbox: 65,
|
||||
v4l2: 60,
|
||||
cpu: 1
|
||||
};
|
||||
|
||||
function selectHardwareEncoders(
|
||||
available: HardwareEncoderInfo[],
|
||||
preferred: HardwareAccelerationOption,
|
||||
codec: CodecType
|
||||
): {
|
||||
selected: HardwareAccelerator;
|
||||
h264Encoder?: string;
|
||||
av1Encoder?: string;
|
||||
warnings: string[];
|
||||
} {
|
||||
const needsH264 = codec === 'h264' || codec === 'dual';
|
||||
const needsAV1 = codec === 'av1' || codec === 'dual';
|
||||
const warnings: string[] = [];
|
||||
|
||||
const supportedForAuto = new Set<HardwareAccelerator>(['nvenc', 'qsv', 'amf']);
|
||||
const relevant = available.filter(info =>
|
||||
(needsH264 && info.h264Encoder) || (needsAV1 && info.av1Encoder)
|
||||
);
|
||||
const autoRelevant = relevant.filter(info => supportedForAuto.has(info.accelerator));
|
||||
|
||||
const pickByAccel = (acc: HardwareAccelerator) =>
|
||||
relevant.find(item => item.accelerator === acc);
|
||||
|
||||
let base: HardwareEncoderInfo | undefined;
|
||||
|
||||
if (preferred !== 'auto') {
|
||||
if (preferred === 'cpu') {
|
||||
base = undefined;
|
||||
} else if (!supportedForAuto.has(preferred)) {
|
||||
warnings.push(`Accelerator "${preferred}" is not supported yet, falling back to CPU`);
|
||||
} else {
|
||||
base = pickByAccel(preferred);
|
||||
if (!base) {
|
||||
throw new Error(`Hardware accelerator "${preferred}" is not available on this system`);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const pool = autoRelevant.length > 0 ? autoRelevant : [];
|
||||
base = pool.sort(
|
||||
(a, b) => (ACCEL_PRIORITY[b.accelerator] || 0) - (ACCEL_PRIORITY[a.accelerator] || 0)
|
||||
)[0];
|
||||
|
||||
if (!base && relevant.length > 0) {
|
||||
warnings.push('A hardware accelerator was detected, but it is not supported by the pipeline yet; falling back to CPU');
|
||||
}
|
||||
}
|
||||
|
||||
const fallbackPool = autoRelevant.length > 0 ? autoRelevant : [];
|
||||
const fallbackList = fallbackPool.sort(
|
||||
(a, b) => (ACCEL_PRIORITY[b.accelerator] || 0) - (ACCEL_PRIORITY[a.accelerator] || 0)
|
||||
);
|
||||
|
||||
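// Resolve an encoder per codec: use the chosen accelerator if it supports the codec, otherwise fall back to another detected accelerator, else CPU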
const pickEncoder = (codecType: 'h264' | 'av1') => {
|
||||
const direct = codecType === 'h264' ? base?.h264Encoder : base?.av1Encoder;
|
||||
if (direct) return { encoder: direct, accel: base?.accelerator };
|
||||
|
||||
const alt = fallbackList.find(info => (codecType === 'h264' ? info.h264Encoder : info.av1Encoder));
|
||||
if (alt) {
|
||||
if (preferred !== 'auto' && base) {
|
||||
warnings.push(
|
||||
`The selected accelerator "${base.accelerator}" does not support ${codecType.toUpperCase()}, using ${alt.accelerator} instead`
|
||||
);
|
||||
}
|
||||
return {
|
||||
encoder: codecType === 'h264' ? alt.h264Encoder! : alt.av1Encoder!,
|
||||
accel: alt.accelerator
|
||||
};
|
||||
}
|
||||
|
||||
if (preferred !== 'auto' && preferred !== 'cpu') {
|
||||
warnings.push(
|
||||
`Accelerator "${preferred}" does not support ${codecType.toUpperCase()}, falling back to CPU`
|
||||
);
|
||||
}
|
||||
|
||||
return { encoder: undefined, accel: 'cpu' as HardwareAccelerator };
|
||||
};
|
||||
|
||||
const h264Result = needsH264 ? pickEncoder('h264') : { encoder: undefined, accel: base?.accelerator };
|
||||
const av1Result = needsAV1 ? pickEncoder('av1') : { encoder: undefined, accel: base?.accelerator };
|
||||
|
||||
const selectedAccel = (base?.accelerator || h264Result.accel || av1Result.accel || 'cpu') as HardwareAccelerator;
|
||||
|
||||
return {
|
||||
selected: selectedAccel,
|
||||
h264Encoder: h264Result.encoder,
|
||||
av1Encoder: av1Result.encoder,
|
||||
warnings
|
||||
};
|
||||
}
|
||||
|
||||
function resolvePresetForEncoder(encoder: string, codecType: 'h264' | 'av1'): string {
|
||||
if (encoder.includes('nvenc')) return 'p4';
|
||||
if (encoder.includes('qsv')) return 'medium';
|
||||
if (encoder.includes('amf')) return 'balanced';
|
||||
if (encoder.includes('vaapi')) return '5';
|
||||
if (encoder.includes('videotoolbox')) return 'medium';
|
||||
if (encoder.includes('v4l2')) return 'medium';
|
||||
|
||||
// CPU fallback presets
|
||||
if (encoder === 'libsvtav1') return '8';
|
||||
if (encoder === 'libx264') return 'medium';
|
||||
|
||||
// Default safe preset
|
||||
return codecType === 'av1' ? '8' : 'medium';
|
||||
}
|
||||
src/core/encoding.ts (new file, +261 lines)
@@ -0,0 +1,261 @@
import { join } from 'node:path';
|
||||
import { execFFmpeg, selectAudioBitrate } from '../utils';
|
||||
import type { VideoProfile, VideoOptimizations, CodecQualitySettings } from '../types';
|
||||
|
||||
/**
|
||||
* Get default CQ/CRF value based on resolution and codec
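 * Example: a 1080p profile → CQ 32 (h264_nvenc), CQ 42 (av1_nvenc), CRF 22 (libx264), CRF 32 (libsvtav1)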
|
||||
*/
|
||||
function getDefaultQuality(height: number, codecType: 'h264' | 'av1', isGPU: boolean): number {
|
||||
if (isGPU) {
|
||||
// GPU encoders use CQ - fixed values for testing
|
||||
if (codecType === 'h264') {
|
||||
// H.264 NVENC CQ = 32 (same value for all resolutions)
|
||||
return 32;
|
||||
} else {
|
||||
// AV1 NVENC CQ = 42 (same value for all resolutions)
|
||||
return 42;
|
||||
}
|
||||
} else {
|
||||
// CPU encoders use CRF
|
||||
if (codecType === 'h264') {
|
||||
// libx264 CRF (about 3-5 lower than NVENC CQ)
|
||||
if (height <= 360) return 25;
|
||||
if (height <= 480) return 24;
|
||||
if (height <= 720) return 23;
|
||||
if (height <= 1080) return 22;
|
||||
if (height <= 1440) return 21;
|
||||
return 20; // 4K
|
||||
} else {
|
||||
// libsvtav1 CRF (0-63 scale, about 20% higher than NVENC CQ)
|
||||
if (height <= 360) return 40;
|
||||
if (height <= 480) return 38;
|
||||
if (height <= 720) return 35;
|
||||
if (height <= 1080) return 32;
|
||||
if (height <= 1440) return 30;
|
||||
return 28; // 4K
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode single profile to MP4
|
||||
* Stage 1: Heavy work - video encoding with optional optimizations
|
||||
*/
|
||||
export async function encodeProfileToMP4(
|
||||
input: string,
|
||||
tempDir: string,
|
||||
profile: VideoProfile,
|
||||
videoCodec: string,
|
||||
preset: string,
|
||||
duration: number,
|
||||
segmentDuration: number,
|
||||
sourceAudioBitrate: number | undefined,
|
||||
codecType: 'h264' | 'av1',
|
||||
qualitySettings?: CodecQualitySettings,
|
||||
optimizations?: VideoOptimizations,
|
||||
onProgress?: (percent: number) => void
|
||||
): Promise<string> {
|
||||
const outputPath = join(tempDir, `video_${codecType}_${profile.name}.mp4`);
|
||||
|
||||
const args = [
|
||||
'-y',
|
||||
'-i', input,
|
||||
'-c:v', videoCodec
|
||||
];
|
||||
|
||||
// Determine if using GPU or CPU encoder
|
||||
const isGPU = videoCodec.includes('nvenc') || videoCodec.includes('qsv') || videoCodec.includes('amf') || videoCodec.includes('vaapi') || videoCodec.includes('videotoolbox') || videoCodec.includes('v4l2');
|
||||
|
||||
// Determine quality value (CQ for GPU, CRF for CPU)
|
||||
let qualityValue: number;
|
||||
if (isGPU && qualitySettings?.cq !== undefined) {
|
||||
qualityValue = qualitySettings.cq;
|
||||
} else if (!isGPU && qualitySettings?.crf !== undefined) {
|
||||
qualityValue = qualitySettings.crf;
|
||||
} else {
|
||||
// Use default quality based on resolution
|
||||
qualityValue = getDefaultQuality(profile.height, codecType, isGPU);
|
||||
}
|
||||
|
||||
// Add codec-specific options with CQ/CRF
|
||||
if (videoCodec === 'h264_nvenc') {
|
||||
// NVIDIA H.264 with CQ
|
||||
args.push('-rc:v', 'vbr');
|
||||
args.push('-cq', String(qualityValue));
|
||||
args.push('-preset', preset);
|
||||
args.push('-2pass', '0');
|
||||
} else if (videoCodec === 'av1_nvenc') {
|
||||
// NVIDIA AV1 with CQ
|
||||
args.push('-rc:v', 'vbr');
|
||||
args.push('-cq', String(qualityValue));
|
||||
args.push('-preset', preset);
|
||||
args.push('-2pass', '0');
|
||||
} else if (videoCodec === 'av1_qsv') {
|
||||
// Intel QSV AV1
|
||||
args.push('-preset', preset);
|
||||
args.push('-global_quality', String(qualityValue));
|
||||
} else if (videoCodec === 'h264_qsv') {
|
||||
// Intel QSV H.264
|
||||
args.push('-preset', preset);
|
||||
args.push('-global_quality', String(qualityValue));
|
||||
} else if (videoCodec === 'av1_amf') {
|
||||
// AMD AMF AV1
|
||||
args.push('-quality', 'balanced');
|
||||
args.push('-rc', 'cqp');
|
||||
args.push('-qp_i', String(qualityValue));
|
||||
args.push('-qp_p', String(qualityValue));
|
||||
} else if (videoCodec === 'h264_amf') {
|
||||
// AMD AMF H.264
|
||||
args.push('-quality', 'balanced');
|
||||
args.push('-rc', 'cqp');
|
||||
args.push('-qp_i', String(qualityValue));
|
||||
args.push('-qp_p', String(qualityValue));
|
||||
} else if (videoCodec === 'libsvtav1') {
|
||||
// CPU-based SVT-AV1 with CRF
|
||||
args.push('-crf', String(qualityValue));
|
||||
args.push('-preset', preset); // 0-13, 8 is medium speed
|
||||
args.push('-svtav1-params', 'tune=0:enable-overlays=1');
|
||||
} else if (videoCodec === 'libx264') {
|
||||
// CPU-based x264 with CRF
|
||||
args.push('-crf', String(qualityValue));
|
||||
args.push('-preset', preset);
|
||||
} else {
|
||||
// Default fallback
|
||||
args.push('-preset', preset);
|
||||
}
|
||||
|
||||
// Add maxrate as safety limit (optional but recommended for streaming)
|
||||
// This prevents extreme bitrate spikes on complex scenes
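// Example: a 1080p AV1 profile at 4044k → maxrate = round(4044 * 0.6 * 1.5) = 3640k, bufsize = 7280k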
|
||||
const bitrateMultiplier = codecType === 'av1' ? 0.6 : 1.0;
|
||||
const maxBitrate = Math.round(parseInt(profile.videoBitrate) * bitrateMultiplier * 1.5); // +50% headroom
|
||||
args.push('-maxrate', `${maxBitrate}k`);
|
||||
args.push('-bufsize', `${maxBitrate * 2}k`);
|
||||
|
||||
// Set GOP size for DASH segments
|
||||
// Keyframes must align with segment boundaries
|
||||
const fps = profile.fps || 30;
|
||||
const gopSize = Math.round(fps * segmentDuration);
|
||||
args.push(
|
||||
'-g', String(gopSize), // GOP size (e.g., 25 fps * 2 sec = 50 frames)
|
||||
'-keyint_min', String(gopSize), // Minimum interval between keyframes
|
||||
'-sc_threshold', '0' // Disable scene change detection (keeps GOP consistent)
|
||||
);
|
||||
|
||||
// Build video filter chain
|
||||
const filters: string[] = [`scale=${profile.width}:${profile.height}`];
|
||||
|
||||
// Apply optimizations (for future use)
|
||||
if (optimizations) {
|
||||
if (optimizations.deinterlace) {
|
||||
filters.push('yadif');
|
||||
}
|
||||
if (optimizations.denoise) {
|
||||
filters.push('hqdn3d');
|
||||
}
|
||||
if (optimizations.customFilters) {
|
||||
filters.push(...optimizations.customFilters);
|
||||
}
|
||||
}
|
||||
|
||||
args.push('-vf', filters.join(','));
|
||||
|
||||
// Audio encoding
|
||||
// Select optimal bitrate based on source (don't upscale)
|
||||
const targetAudioBitrate = parseInt(profile.audioBitrate) || 256;
|
||||
const optimalAudioBitrate = selectAudioBitrate(sourceAudioBitrate, targetAudioBitrate);
|
||||
args.push('-c:a', 'aac', '-b:a', optimalAudioBitrate);
|
||||
|
||||
// Audio optimizations
|
||||
if (optimizations?.audioNormalize) {
|
||||
args.push('-af', 'loudnorm');
|
||||
}
|
||||
|
||||
// Output
|
||||
args.push('-f', 'mp4', outputPath);
|
||||
|
||||
await execFFmpeg(args, onProgress, duration);
|
||||
|
||||
return outputPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode all profiles to MP4 (parallel or sequential)
|
||||
* Stage 1: Main encoding work
|
||||
*/
|
||||
export async function encodeProfilesToMP4(
|
||||
input: string,
|
||||
tempDir: string,
|
||||
profiles: VideoProfile[],
|
||||
videoCodec: string,
|
||||
preset: string,
|
||||
duration: number,
|
||||
segmentDuration: number,
|
||||
sourceAudioBitrate: number | undefined,
|
||||
parallel: boolean,
|
||||
maxConcurrent: number,
|
||||
codecType: 'h264' | 'av1',
|
||||
qualitySettings?: CodecQualitySettings,
|
||||
optimizations?: VideoOptimizations,
|
||||
onProgress?: (profileName: string, percent: number) => void
|
||||
): Promise<Map<string, string>> {
|
||||
const mp4Files = new Map<string, string>();
|
||||
|
||||
if (parallel && profiles.length > 1) {
|
||||
// Parallel encoding with batching
|
||||
for (let i = 0; i < profiles.length; i += maxConcurrent) {
|
||||
const batch = profiles.slice(i, i + maxConcurrent);
|
||||
const batchPromises = batch.map((profile) =>
|
||||
encodeProfileToMP4(
|
||||
input,
|
||||
tempDir,
|
||||
profile,
|
||||
videoCodec,
|
||||
preset,
|
||||
duration,
|
||||
segmentDuration,
|
||||
sourceAudioBitrate,
|
||||
codecType,
|
||||
qualitySettings,
|
||||
optimizations,
|
||||
(percent) => {
|
||||
if (onProgress) {
|
||||
onProgress(profile.name, percent);
|
||||
}
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
const batchResults = await Promise.all(batchPromises);
|
||||
batchResults.forEach((mp4Path, idx) => {
|
||||
const profile = batch[idx];
|
||||
mp4Files.set(profile.name, mp4Path);
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Sequential encoding
|
||||
for (const profile of profiles) {
|
||||
const mp4Path = await encodeProfileToMP4(
|
||||
input,
|
||||
tempDir,
|
||||
profile,
|
||||
videoCodec,
|
||||
preset,
|
||||
duration,
|
||||
segmentDuration,
|
||||
sourceAudioBitrate,
|
||||
codecType,
|
||||
qualitySettings,
|
||||
optimizations,
|
||||
(percent) => {
|
||||
if (onProgress) {
|
||||
onProgress(profile.name, percent);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
mp4Files.set(profile.name, mp4Path);
|
||||
}
|
||||
}
|
||||
|
||||
return mp4Files;
|
||||
}
|
||||
src/core/manifest.ts (new file, +256 lines)
@@ -0,0 +1,256 @@
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import type { VideoProfile, CodecType } from '../types';
|
||||
|
||||
/**
|
||||
* DASH MPD Manifest Generator
|
||||
* Handles creation and manipulation of MPEG-DASH manifests
|
||||
*/
|
||||
|
||||
/**
|
||||
* Validate and fix MPD manifest XML structure
|
||||
* Ensures all Representation tags are properly closed
|
||||
*/
|
||||
export async function validateAndFixManifest(manifestPath: string): Promise<void> {
|
||||
let mpd = await readFile(manifestPath, 'utf-8');
|
||||
|
||||
// Fix 1: Remove double slashes in self-closing tags: "//> → "/>
|
||||
mpd = mpd.replace(/\/\/>/g, '/>');
|
||||
|
||||
// Fix 2: Fix malformed self-closing tags with extra space: "/ /> → "/>
|
||||
mpd = mpd.replace(/\/\s+\/>/g, '/>');
|
||||
|
||||
// Fix 3: Normalize Representation self-closing tags - remove extra spaces before />
|
||||
mpd = mpd.replace(/(<Representation[^>]+)\s+\/>/g, '$1/>');
|
||||
|
||||
// Fix 4: Remove orphaned closing tags after self-closing Representation tags
|
||||
mpd = mpd.replace(/<Representation\s+([^>]+)\/>\s*<\/Representation>/g, '<Representation $1/>');
|
||||
|
||||
// Fix 5: Convert self-closing Representation tags that have child elements to properly opened tags
|
||||
mpd = mpd.replace(
|
||||
/<Representation\s+([^>]+)\/>\s*(<AudioChannelConfiguration[^>]*\/>)/g,
|
||||
'<Representation $1>\n $2\n </Representation>'
|
||||
);
|
||||
|
||||
// Fix 6: Convert unclosed Representation tags to self-closing (if no children)
|
||||
mpd = mpd.replace(
|
||||
/<Representation\s+([^>]+)>\s*(?=<(?:Representation|\/AdaptationSet))/g,
|
||||
'<Representation $1/>\n'
|
||||
);
|
||||
|
||||
await writeFile(manifestPath, mpd, 'utf-8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Update MPD manifest paths to reflect subdirectory structure
|
||||
* Changes: $RepresentationID$_$Number$ → $RepresentationID$/$RepresentationID$_$Number$
|
||||
*/
|
||||
export async function updateManifestPaths(
|
||||
manifestPath: string,
|
||||
profiles: VideoProfile[],
|
||||
codecType: CodecType
|
||||
): Promise<void> {
|
||||
let mpd = await readFile(manifestPath, 'utf-8');
|
||||
|
||||
// MP4Box uses $RepresentationID$ template variable
|
||||
// Replace: media="$RepresentationID$_$Number$.m4s"
|
||||
// With: media="$RepresentationID$/$RepresentationID$_$Number$.m4s"
|
||||
|
||||
mpd = mpd.replace(
|
||||
/media="\$RepresentationID\$_\$Number\$\.m4s"/g,
|
||||
'media="$RepresentationID$/$RepresentationID$_$Number$.m4s"'
|
||||
);
|
||||
|
||||
// Replace: initialization="$RepresentationID$_.mp4"
|
||||
// With: initialization="$RepresentationID$/$RepresentationID$_.mp4"
|
||||
|
||||
mpd = mpd.replace(
|
||||
/initialization="\$RepresentationID\$_\.mp4"/g,
|
||||
'initialization="$RepresentationID$/$RepresentationID$_.mp4"'
|
||||
);
|
||||
|
||||
await writeFile(manifestPath, mpd, 'utf-8');
|
||||
}
|
||||
|
||||
/**
 * Separate H.264 and AV1 representations into different AdaptationSets
 * This allows DASH players to prefer AV1 when supported, with H.264 fallback
 */
export async function separateCodecAdaptationSets(manifestPath: string): Promise<void> {
  let mpd = await readFile(manifestPath, 'utf-8');

  // Simple string-based approach: look for mixed codec patterns
  // Find patterns like: <Representation id="XXX-h264"... followed by <Representation id="YYY-av1"...
  const lines = mpd.split('\n');
  const result: string[] = [];
  let i = 0;

  while (i < lines.length) {
    const line = lines[i];

    // Check if this is an AdaptationSet opening tag with video content
    if (line.includes('<AdaptationSet') && line.includes('maxWidth')) {
      // Start collecting this AdaptationSet
      const adaptationSetStart = i;
      let adaptationSetLines: string[] = [line];
      let segmentTemplateLines: string[] = [];
      let h264Reps: string[] = [];
      let av1Reps: string[] = [];
      let inSegmentTemplate = false;

      i++;

      // Collect all lines until closing </AdaptationSet>
      while (i < lines.length && !lines[i].includes('</AdaptationSet>')) {
        const currentLine = lines[i];

        if (currentLine.includes('<SegmentTemplate')) {
          inSegmentTemplate = true;
        }

        if (inSegmentTemplate) {
          segmentTemplateLines.push(currentLine);
          if (currentLine.includes('</SegmentTemplate>')) {
            inSegmentTemplate = false;
          }
        } else if (currentLine.includes('<Representation') && currentLine.includes('-h264')) {
          h264Reps.push(currentLine);
        } else if (currentLine.includes('<Representation') && currentLine.includes('-av1')) {
          av1Reps.push(currentLine);
        }

        i++;
      }

      // Check if we have both codecs
      if (h264Reps.length > 0 && av1Reps.length > 0) {
        // Split into two AdaptationSets

        // H.264 AdaptationSet
        result.push(line); // Opening tag
        segmentTemplateLines.forEach(l => result.push(l));
        h264Reps.forEach(l => result.push(l));
        result.push(' </AdaptationSet>');

        // AV1 AdaptationSet
        result.push(line); // Same opening tag
        segmentTemplateLines.forEach(l => result.push(l));
        av1Reps.forEach(l => result.push(l));
        result.push(' </AdaptationSet>');
      } else {
        // No mixed codecs, keep original
        result.push(line);
        for (let j = adaptationSetStart + 1; j < i; j++) {
          result.push(lines[j]);
        }
        result.push(lines[i]); // closing tag
      }

      i++;
    } else {
      result.push(line);
      i++;
    }
  }

  await writeFile(manifestPath, result.join('\n'), 'utf-8');
}

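/*
 * Sketch of the transformation performed by separateCodecAdaptationSets()
 * (element layout is illustrative, attributes omitted):
 *
 *   before: one <AdaptationSet> containing <Representation id="720p-h264"> and <Representation id="720p-av1">
 *   after:  two <AdaptationSet> elements sharing the same SegmentTemplate lines, one holding the
 *           "-h264" representations and one holding the "-av1" representations, so players can
 *           choose a codec per set and fall back to H.264 when AV1 is unsupported.
 */
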
/**
 * Generate MPD manifest from scratch (alternative to MP4Box)
 * TODO: Implement full MPD generation without external tools
 */
export async function generateMPDManifest(
  profiles: VideoProfile[],
  codecType: CodecType,
  duration: number,
  segmentDuration: number
): Promise<string> {
  // TODO: Implement manual MPD generation
  // This will be used when we want full control over the manifest
  throw new Error('Manual MPD generation not yet implemented. Use Bento4 or MP4Box for now.');
}

/**
 * Update HLS master manifest to reflect subdirectory structure
 */
export async function updateHLSManifestPaths(
  manifestPath: string,
  profiles: VideoProfile[]
): Promise<void> {
  let m3u8 = await readFile(manifestPath, 'utf-8');

  // MP4Box uses $RepresentationID$ template variable
  // Replace: media="$RepresentationID$_$Number$.m4s"
  // With: media="$RepresentationID$/$RepresentationID$_$Number$.m4s"
  m3u8 = m3u8.replace(
    /media="\$RepresentationID\$_\$Number\$\.m4s"/g,
    'media="$RepresentationID$/$RepresentationID$_$Number$.m4s"'
  );

  // Replace: initialization="$RepresentationID$_.mp4"
  // With: initialization="$RepresentationID$/$RepresentationID$_.mp4"
  m3u8 = m3u8.replace(
    /initialization="\$RepresentationID\$_\.mp4"/g,
    'initialization="$RepresentationID$/$RepresentationID$_.mp4"'
  );

  await writeFile(manifestPath, m3u8, 'utf-8');
}

/**
 * Generate HLS media playlist content
 */
export function generateHLSMediaPlaylist(
  segmentFiles: string[],
  initFile: string,
  segmentDuration: number
): string {
  let content = '#EXTM3U\n';
  content += `#EXT-X-VERSION:6\n`;
  content += `#EXT-X-TARGETDURATION:${Math.ceil(segmentDuration)}\n`;
  content += `#EXT-X-MEDIA-SEQUENCE:1\n`;
  content += `#EXT-X-INDEPENDENT-SEGMENTS\n`;
  content += `#EXT-X-MAP:URI="${initFile}"\n`;

  for (const segmentFile of segmentFiles) {
    content += `#EXTINF:${segmentDuration},\n`;
    content += `${segmentFile}\n`;
  }

  content += `#EXT-X-ENDLIST\n`;

  return content;
}

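/*
 * Example output of generateHLSMediaPlaylist(['720p_1.m4s', '720p_2.m4s'], '720p_.mp4', 2)
 * (file names are hypothetical):
 *
 *   #EXTM3U
 *   #EXT-X-VERSION:6
 *   #EXT-X-TARGETDURATION:2
 *   #EXT-X-MEDIA-SEQUENCE:1
 *   #EXT-X-INDEPENDENT-SEGMENTS
 *   #EXT-X-MAP:URI="720p_.mp4"
 *   #EXTINF:2,
 *   720p_1.m4s
 *   #EXTINF:2,
 *   720p_2.m4s
 *   #EXT-X-ENDLIST
 */
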
/**
 * Generate HLS master playlist content
 */
export function generateHLSMasterPlaylist(
  variants: Array<{ path: string; bandwidth: number; resolution: string; fps: number }>,
  hasAudio: boolean
): string {
  let content = '#EXTM3U\n';
  content += '#EXT-X-VERSION:6\n';
  content += '#EXT-X-INDEPENDENT-SEGMENTS\n\n';

  // Add audio reference
  if (hasAudio) {
    content += `#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="audio",AUTOSELECT=YES,URI="audio/playlist.m3u8",CHANNELS="2"\n\n`;
  }

  // Add video variants
  for (const variant of variants) {
    const codecs = hasAudio ? 'avc1.4D4020,mp4a.40.2' : 'avc1.4D4020';
    content += `#EXT-X-STREAM-INF:BANDWIDTH=${variant.bandwidth},CODECS="${codecs}",RESOLUTION=${variant.resolution},FRAME-RATE=${variant.fps}`;
    if (hasAudio) {
      content += `,AUDIO="audio"`;
    }
    content += `\n`;
    content += `${variant.path}\n\n`;
  }

  return content;
}

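/*
 * Example output for a single hypothetical variant, with audio:
 * generateHLSMasterPlaylist([{ path: '720p/playlist.m3u8', bandwidth: 2500000, resolution: '1280x720', fps: 30 }], true)
 *
 *   #EXTM3U
 *   #EXT-X-VERSION:6
 *   #EXT-X-INDEPENDENT-SEGMENTS
 *
 *   #EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",NAME="audio",AUTOSELECT=YES,URI="audio/playlist.m3u8",CHANNELS="2"
 *
 *   #EXT-X-STREAM-INF:BANDWIDTH=2500000,CODECS="avc1.4D4020,mp4a.40.2",RESOLUTION=1280x720,FRAME-RATE=30,AUDIO="audio"
 *   720p/playlist.m3u8
 */
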
395
src/core/packaging.ts
Normal file
@@ -0,0 +1,395 @@
import { join } from 'node:path';
import { execMP4Box } from '../utils';
import type { VideoProfile, CodecType, StreamingFormat } from '../types';
import { readdir, rename, mkdir, writeFile } from 'node:fs/promises';
import {
  validateAndFixManifest,
  updateManifestPaths,
  separateCodecAdaptationSets,
  updateHLSManifestPaths,
  generateHLSMediaPlaylist,
  generateHLSMasterPlaylist
} from './manifest';

/**
 * Package MP4 files into DASH format using MP4Box
 * Stage 2: Light work - just packaging, no encoding
 * Creates one master MPD manifest with all profiles and codecs
 */
export async function packageToDash(
  codecMP4Files: Map<'h264' | 'av1', Map<string, string>>,
  outputDir: string,
  profiles: VideoProfile[],
  segmentDuration: number,
  codecType: CodecType,
  hasAudio: boolean
): Promise<string> {
  const manifestPath = join(outputDir, 'manifest.mpd');

  // Build MP4Box command
  const args = [
    '-dash', String(segmentDuration * 1000), // MP4Box expects milliseconds
    '-frag', String(segmentDuration * 1000),
    '-rap', // Force segments to start with random access points
    '-segment-timeline', // Use SegmentTimeline for accurate segment durations
    '-segment-name', '$RepresentationID$_$Number$',
    '-out', manifestPath
  ];

  // Add all MP4 files for each codec
  let firstFile = true;

  for (const [codec, mp4Files] of codecMP4Files.entries()) {
    for (const profile of profiles) {
      const mp4Path = mp4Files.get(profile.name);
      if (!mp4Path) {
        throw new Error(`MP4 file not found for profile: ${profile.name}, codec: ${codec}`);
      }

      // Representation ID includes codec: e.g., "720p-h264", "720p-av1"
      const representationId = codecType === 'dual' ? `${profile.name}-${codec}` : profile.name;

      // Add video track with representation ID
      args.push(`${mp4Path}#video:id=${representationId}`);

      // Add audio track only once (shared across all profiles and codecs)
      if (firstFile && hasAudio) {
        args.push(`${mp4Path}#audio:id=audio`);
        firstFile = false;
      }
    }
  }

  // Execute MP4Box
  // Note: We separate codecs into different AdaptationSets manually via separateCodecAdaptationSets()
  await execMP4Box(args);

  // MP4Box creates files in the same directory as output MPD
  // Move segment files to profile subdirectories for clean structure
  await organizeSegments(outputDir, profiles, codecType, hasAudio);

  // Update MPD to reflect new file structure with subdirectories
  await updateManifestPaths(manifestPath, profiles, codecType);

  // For dual-codec mode, separate H.264 and AV1 into different AdaptationSets
  if (codecType === 'dual') {
    await separateCodecAdaptationSets(manifestPath);
  }

  // Validate and fix XML structure (ensure all tags are properly closed)
  await validateAndFixManifest(manifestPath);

  return manifestPath;
}

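/*
 * Example of the MP4Box invocation built above (argv form, no shell quoting) for
 * segmentDuration = 4, one 720p profile, dual codec, with audio; paths are hypothetical:
 *
 *   MP4Box -dash 4000 -frag 4000 -rap -segment-timeline
 *          -segment-name $RepresentationID$_$Number$ -out output/manifest.mpd
 *          tmp/720p-h264.mp4#video:id=720p-h264 tmp/720p-h264.mp4#audio:id=audio
 *          tmp/720p-av1.mp4#video:id=720p-av1
 */
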
/**
 * Organize segments into profile subdirectories
 * MP4Box creates all files in one directory, we organize them
 */
async function organizeSegments(
  outputDir: string,
  profiles: VideoProfile[],
  codecType: CodecType,
  hasAudio: boolean
): Promise<void> {
  // readdir, rename and mkdir come from the static 'node:fs/promises' import at the top of this file

  // For dual-codec mode, create codec-specific subdirectories (e.g., "720p-h264/", "720p-av1/")
  // For single-codec mode, use simple profile names (e.g., "720p/")
  const codecs: Array<'h264' | 'av1'> = [];
  if (codecType === 'h264' || codecType === 'dual') codecs.push('h264');
  if (codecType === 'av1' || codecType === 'dual') codecs.push('av1');

  const representationIds: string[] = [];

  for (const codec of codecs) {
    for (const profile of profiles) {
      const repId = codecType === 'dual' ? `${profile.name}-${codec}` : profile.name;
      representationIds.push(repId);

      const profileDir = join(outputDir, repId);
      await mkdir(profileDir, { recursive: true });
    }
  }

  // Create audio subdirectory
  const audioDir = join(outputDir, 'audio');
  if (hasAudio) {
    await mkdir(audioDir, { recursive: true });
  }

  // Get all files in output directory
  const files = await readdir(outputDir);

  // Move segment files to their respective directories
  for (const file of files) {
    // Skip manifest
    if (file === 'manifest.mpd') {
      continue;
    }

    // Move audio files to audio/ directory
    if (hasAudio && (file.startsWith('audio_') || file === 'audio_init.m4s')) {
      const oldPath = join(outputDir, file);
      const newPath = join(audioDir, file);
      await rename(oldPath, newPath);
      continue;
    }

    // Move video segment files to their representation directories
    for (const repId of representationIds) {
      if (file.startsWith(`${repId}_`)) {
        const oldPath = join(outputDir, file);
        const newPath = join(outputDir, repId, file);
        await rename(oldPath, newPath);
        break;
      }
    }
  }
}

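/*
 * Resulting layout for a dual-codec run with a single 720p profile and audio
 * (file names are illustrative, based on the "$RepresentationID$_$Number$" segment naming above):
 *
 *   output/
 *     manifest.mpd
 *     720p-h264/   720p-h264_.mp4, 720p-h264_1.m4s, 720p-h264_2.m4s, ...
 *     720p-av1/    720p-av1_.mp4,  720p-av1_1.m4s,  720p-av1_2.m4s,  ...
 *     audio/       audio init and audio_*.m4s segments
 */
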
/**
 * Package MP4 files into HLS format using MP4Box
 * Stage 2: Light work - just packaging, no encoding
 * Creates master.m3u8 playlist with H.264 profiles only (for Safari/iOS compatibility)
 */
export async function packageToHLS(
  codecMP4Files: Map<'h264' | 'av1', Map<string, string>>,
  outputDir: string,
  profiles: VideoProfile[],
  segmentDuration: number,
  codecType: CodecType
): Promise<string> {
  const manifestPath = join(outputDir, 'master.m3u8');

  // Build MP4Box command for HLS
  const args = [
    '-dash', String(segmentDuration * 1000), // MP4Box expects milliseconds
    '-frag', String(segmentDuration * 1000),
    '-rap', // Force segments to start with random access points
    '-segment-timeline', // Use SegmentTimeline for accurate segment durations
    '-segment-name', '$RepresentationID$_$Number$',
    '-profile', 'live', // HLS mode instead of DASH
    '-out', manifestPath
  ];

  // For HLS, use only H.264 codec (Safari/iOS compatibility)
  const h264Files = codecMP4Files.get('h264');

  if (!h264Files) {
    throw new Error('H.264 codec files not found. HLS requires H.264 for Safari/iOS compatibility.');
  }

  let firstFile = true;

  for (const profile of profiles) {
    const mp4Path = h264Files.get(profile.name);
    if (!mp4Path) {
      throw new Error(`MP4 file not found for profile: ${profile.name}, codec: h264`);
    }

    // Representation ID for HLS (no codec suffix since we only use H.264)
    const representationId = profile.name;

    // Add video track with representation ID
    args.push(`${mp4Path}#video:id=${representationId}`);

    // Add audio track only once (shared across all profiles)
    if (firstFile) {
      args.push(`${mp4Path}#audio:id=audio`);
      firstFile = false;
    }
  }

  // Execute MP4Box
  await execMP4Box(args);

  // MP4Box creates files in the same directory as output manifest
  // Move segment files to profile subdirectories for clean structure
  await organizeSegmentsHLS(outputDir, profiles);

  // Update manifest to reflect new file structure with subdirectories
  await updateHLSManifestPaths(manifestPath, profiles);

  return manifestPath;
}

/**
 * Organize HLS segments into profile subdirectories
 * HLS only uses H.264, so no codec suffix in directory names
 */
async function organizeSegmentsHLS(
  outputDir: string,
  profiles: VideoProfile[]
): Promise<void> {
  const representationIds: string[] = [];

  for (const profile of profiles) {
    const repId = profile.name; // Just profile name, no codec
    representationIds.push(repId);

    const profileDir = join(outputDir, repId);
    await mkdir(profileDir, { recursive: true });
  }

  // Create audio subdirectory
  const audioDir = join(outputDir, 'audio');
  await mkdir(audioDir, { recursive: true });

  // Get all files in output directory
  const files = await readdir(outputDir);

  // Move segment files to their respective directories
  for (const file of files) {
    // Skip manifest
    if (file === 'master.m3u8') {
      continue;
    }

    // Move audio files to audio/ directory
    if (file.startsWith('audio_') || file === 'audio_init.m4s') {
      const oldPath = join(outputDir, file);
      const newPath = join(audioDir, file);
      await rename(oldPath, newPath);
      continue;
    }

    // Move video segment files to their representation directories
    for (const repId of representationIds) {
      if (file.startsWith(`${repId}_`)) {
        const oldPath = join(outputDir, file);
        const newPath = join(outputDir, repId, file);
        await rename(oldPath, newPath);
        break;
      }
    }
  }
}

/**
 * Unified packaging: creates segments once and generates both DASH and HLS manifests
 * No duplication - segments stored in {profile}-{codec}/ folders
 */
export async function packageToFormats(
  codecMP4Files: Map<'h264' | 'av1', Map<string, string>>,
  outputDir: string,
  profiles: VideoProfile[],
  segmentDuration: number,
  codec: CodecType,
  format: StreamingFormat,
  hasAudio: boolean
): Promise<{ manifestPath?: string; hlsManifestPath?: string }> {
  let manifestPath: string | undefined;
  let hlsManifestPath: string | undefined;

  // Step 1: Generate DASH segments and manifest using MP4Box
  if (format === 'dash' || format === 'both') {
    manifestPath = await packageToDash(codecMP4Files, outputDir, profiles, segmentDuration, codec, hasAudio);
  }

  // Step 2: Generate HLS playlists from existing segments
  if (format === 'hls' || format === 'both') {
    // HLS generation from segments
    hlsManifestPath = await generateHLSPlaylists(
      outputDir,
      profiles,
      segmentDuration,
      codec,
      hasAudio
    );
  }

  return { manifestPath, hlsManifestPath };
}

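/*
 * Illustrative usage (paths and the preceding encode step are hypothetical; profiles would
 * typically come from selectProfiles() or a custom list):
 *
 *   const mp4Files = new Map<'h264' | 'av1', Map<string, string>>([
 *     ['h264', new Map([['720p', 'tmp/720p-h264.mp4']])],
 *     ['av1',  new Map([['720p', 'tmp/720p-av1.mp4']])]
 *   ]);
 *   const { manifestPath, hlsManifestPath } =
 *     await packageToFormats(mp4Files, './output', profiles, 4, 'dual', 'both', true);
 */
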
/**
 * Generate HLS playlists (media playlists in folders + master in root)
 */
async function generateHLSPlaylists(
  outputDir: string,
  profiles: VideoProfile[],
  segmentDuration: number,
  codecType: CodecType,
  hasAudio: boolean
): Promise<string> {
  const masterPlaylistPath = join(outputDir, 'master.m3u8');
  const variants: Array<{ path: string; bandwidth: number; resolution: string; fps: number }> = [];

  // Generate media playlist for each H.264 profile
  for (const profile of profiles) {
    const profileDir = codecType === 'dual' ? `${profile.name}-h264` : profile.name;
    const profilePath = join(outputDir, profileDir);

    // Read segment files from profile directory
    const files = await readdir(profilePath);
    const segmentFiles = files
      .filter(f => f.endsWith('.m4s'))
      .sort((a, b) => {
        const numA = parseInt(a.match(/_(\d+)\.m4s$/)?.[1] || '0');
        const numB = parseInt(b.match(/_(\d+)\.m4s$/)?.[1] || '0');
        return numA - numB;
      });

    const initFile = files.find(f => f.endsWith('_.mp4'));

    if (!initFile || segmentFiles.length === 0) {
      continue; // Skip if no segments found
    }

    // Generate media playlist content using manifest module
    const playlistContent = generateHLSMediaPlaylist(segmentFiles, initFile, segmentDuration);

    // Write media playlist
    const playlistPath = join(profilePath, 'playlist.m3u8');
    await writeFile(playlistPath, playlistContent, 'utf-8');

    // Add to variants list
    const bandwidth = parseInt(profile.videoBitrate) * 1000;
    variants.push({
      path: `${profileDir}/playlist.m3u8`,
      bandwidth,
      resolution: `${profile.width}x${profile.height}`,
      fps: profile.fps || 30
    });
  }

  // Generate audio media playlist (only if source has audio)
  let audioInit: string | undefined;
  let audioSegments: string[] = [];

  if (hasAudio) {
    const audioDir = join(outputDir, 'audio');
    let audioFiles: string[] = [];

    try {
      audioFiles = await readdir(audioDir);
    } catch {
      audioFiles = [];
    }

    audioSegments = audioFiles
      .filter(f => f.endsWith('.m4s'))
      .sort((a, b) => {
        const numA = parseInt(a.match(/_(\d+)\.m4s$/)?.[1] || '0');
        const numB = parseInt(b.match(/_(\d+)\.m4s$/)?.[1] || '0');
        return numA - numB;
      });

    audioInit = audioFiles.find(f => f.endsWith('_.mp4'));

    if (audioInit && audioSegments.length > 0) {
      const audioPlaylistContent = generateHLSMediaPlaylist(audioSegments, audioInit, segmentDuration);
      await writeFile(join(audioDir, 'playlist.m3u8'), audioPlaylistContent, 'utf-8');
    }
  }

  // Generate master playlist using manifest module
  const masterContent = generateHLSMasterPlaylist(
    variants,
    hasAudio && audioInit !== undefined && audioSegments.length > 0
  );
  await writeFile(masterPlaylistPath, masterContent, 'utf-8');

  return masterPlaylistPath;
}
139
src/core/thumbnails.ts
Normal file
@@ -0,0 +1,139 @@
import { join } from 'node:path';
import type { ThumbnailConfig } from '../types';
import { execFFmpeg, formatVttTime, ensureDir } from '../utils';
import { readdir, unlink, rmdir, writeFile } from 'node:fs/promises';

/**
 * Generate poster image from video at specific timecode
 * @param inputPath - Path to input video
 * @param outputDir - Directory to save poster
 * @param timecode - Timecode in format "HH:MM:SS" or seconds (default: "00:00:00")
 * @returns Path to generated poster
 */
export async function generatePoster(
  inputPath: string,
  outputDir: string,
  timecode: string = '00:00:00'
): Promise<string> {
  const posterPath = join(outputDir, 'poster.jpg');

  // FFmpeg's -ss option accepts both plain seconds ("5" or "5.5") and "HH:MM:SS" timecodes,
  // so the value can be passed through unchanged.
  const timeArg = timecode;

  await execFFmpeg([
    '-ss', timeArg, // Seek to timecode
    '-i', inputPath, // Input file
    '-vframes', '1', // Extract 1 frame
    '-q:v', '2', // High quality (2-5 range, 2 is best)
    '-y', // Overwrite output
    posterPath
  ]);

  return posterPath;
}

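/*
 * Illustrative usage (paths are hypothetical):
 *
 *   const poster = await generatePoster('./video.mp4', './output', '00:00:05');
 *   // writes ./output/poster.jpg and returns its path
 */
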
/**
 * Generate thumbnail sprite and VTT file
 */
export async function generateThumbnailSprite(
  inputPath: string,
  outputDir: string,
  duration: number,
  config: Required<ThumbnailConfig>
): Promise<{ spritePath: string; vttPath: string }> {
  const { width, height, interval, columns } = config;

  // Create temp directory for individual thumbnails
  const tempDir = join(outputDir, '.thumbnails_temp');
  await ensureDir(tempDir);
  await writeFile(join(tempDir, '.keep'), '');

  // Generate individual thumbnails
  const thumbnailPattern = join(tempDir, 'thumb_%04d.jpg');

  await execFFmpeg([
    '-i', inputPath,
    '-vf', `fps=1/${interval},scale=${width}:${height}`,
    '-q:v', '5',
    thumbnailPattern
  ]);

  // Get list of generated thumbnails
  const files = await readdir(tempDir);
  const thumbFiles = files
    .filter(f => f.startsWith('thumb_') && f.endsWith('.jpg'))
    .sort();

  if (thumbFiles.length === 0) {
    throw new Error('No thumbnails generated');
  }

  // Calculate grid dimensions
  const totalThumbs = thumbFiles.length;
  const rows = Math.ceil(totalThumbs / columns);

  // Create sprite sheet using FFmpeg
  const spritePath = join(outputDir, 'thumbnails.jpg');

  // Use pattern input for tile filter (not multiple -i inputs)
  const tileFilter = `tile=${columns}x${rows}`;

  await execFFmpeg([
    '-i', thumbnailPattern, // Use pattern, not individual files
    '-filter_complex', tileFilter,
    '-q:v', '5',
    spritePath
  ]);

  // Generate VTT file
  const vttPath = join(outputDir, 'thumbnails.vtt');
  const vttContent = generateVttContent(
    totalThumbs,
    interval,
    width,
    height,
    columns,
    'thumbnails.jpg'
  );

  await writeFile(vttPath, vttContent);

  // Clean up temp files
  for (const file of thumbFiles) {
    await unlink(join(tempDir, file));
  }
  await unlink(join(tempDir, '.keep'));
  await rmdir(tempDir); // Remove directory

  return { spritePath, vttPath };
}

/**
 * Generate VTT file content
 */
function generateVttContent(
  totalThumbs: number,
  interval: number,
  thumbWidth: number,
  thumbHeight: number,
  columns: number,
  spriteFilename: string
): string {
  let vtt = 'WEBVTT\n\n';

  for (let i = 0; i < totalThumbs; i++) {
    const startTime = i * interval;
    const endTime = (i + 1) * interval;

    const row = Math.floor(i / columns);
    const col = i % columns;

    const x = col * thumbWidth;
    const y = row * thumbHeight;

    vtt += `${formatVttTime(startTime)} --> ${formatVttTime(endTime)}\n`;
    vtt += `${spriteFilename}#xywh=${x},${y},${thumbWidth},${thumbHeight}\n\n`;
  }

  return vtt;
}

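/*
 * Example output of generateVttContent(2, 5, 160, 90, 10, 'thumbnails.jpg'):
 *
 *   WEBVTT
 *
 *   00:00:00.000 --> 00:00:05.000
 *   thumbnails.jpg#xywh=0,0,160,90
 *
 *   00:00:05.000 --> 00:00:10.000
 *   thumbnails.jpg#xywh=160,0,160,90
 */
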
31
src/index.ts
Normal file
@@ -0,0 +1,31 @@
// Main exports
export { convertToDash } from './core/converter';

// Type exports
export type {
  DashConvertOptions,
  DashConvertResult,
  VideoProfile,
  ThumbnailConfig,
  ConversionProgress,
  VideoMetadata,
  VideoOptimizations,
  CodecType,
  StreamingFormat,
  QualitySettings,
  HardwareAccelerator,
  HardwareAccelerationOption,
  HardwareEncoderInfo
} from './types';

// Utility exports
export {
  checkFFmpeg,
  checkMP4Box,
  checkNvenc,
  checkAV1Support,
  getVideoMetadata,
  selectAudioBitrate,
  detectHardwareEncoders
} from './utils';

// Profile exports
export { DEFAULT_PROFILES, selectProfiles } from './config/profiles';
238
src/types/index.ts
Normal file
@@ -0,0 +1,238 @@
/**
 * Video codec type for encoding
 */
export type CodecType = 'av1' | 'h264' | 'dual';

/**
 * Streaming format type
 */
export type StreamingFormat = 'dash' | 'hls' | 'both';

/**
 * Hardware accelerator type
 */
export type HardwareAccelerator = 'nvenc' | 'qsv' | 'amf' | 'vaapi' | 'videotoolbox' | 'v4l2' | 'cpu';

/**
 * Accelerator selection option (a specific accelerator or 'auto')
 */
export type HardwareAccelerationOption = HardwareAccelerator | 'auto';

/**
 * Set of encoders available for a specific accelerator
 */
export interface HardwareEncoderInfo {
  accelerator: HardwareAccelerator;
  h264Encoder?: string;
  av1Encoder?: string;
}

/**
 * Quality settings for a codec
 */
export interface CodecQualitySettings {
  /** CQ (Constant Quality) for GPU encoders (0-51, lower = better quality) */
  cq?: number;

  /** CRF (Constant Rate Factor) for CPU encoders (0-51 for h264, 0-63 for av1, lower = better quality) */
  crf?: number;
}

/**
 * Quality settings for video encoding
 */
export interface QualitySettings {
  /** Quality settings for H.264 codec */
  h264?: CodecQualitySettings;

  /** Quality settings for AV1 codec */
  av1?: CodecQualitySettings;
}

/**
 * Configuration options for DASH conversion
 */
export interface DashConvertOptions {
  /** Input video file path */
  input: string;

  /** Output directory path */
  outputDir: string;

  /** Segment duration in seconds (default: 2) */
  segmentDuration?: number;

  /** Video quality profiles to generate */
  profiles?: VideoProfile[];

  /** Custom resolution profiles as strings (e.g., ['360p', '480p', '720p@60']) */
  customProfiles?: string[];

  /** Video codec to use: 'av1', 'h264', or 'dual' for both (default: 'dual') */
  codec?: CodecType;

  /** Streaming format to generate: 'dash', 'hls', or 'both' (default: 'both') */
  format?: StreamingFormat;

  /** Enable NVENC hardware acceleration (auto-detect if undefined). Deprecated: use hardwareAccelerator instead */
  useNvenc?: boolean;

  /** Preferred hardware accelerator (default: 'auto') */
  hardwareAccelerator?: HardwareAccelerationOption;

  /** Quality settings for video encoding (CQ/CRF values) */
  quality?: QualitySettings;

  /** Generate thumbnail sprite (default: true) */
  generateThumbnails?: boolean;

  /** Thumbnail sprite configuration */
  thumbnailConfig?: ThumbnailConfig;

  /** Generate poster image (default: true) */
  generatePoster?: boolean;

  /** Poster timecode in format HH:MM:SS or seconds (default: 00:00:01) */
  posterTimecode?: string;

  /** Parallel encoding (default: true) */
  parallel?: boolean;

  /** Callback for progress updates */
  onProgress?: (progress: ConversionProgress) => void;
}

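/*
 * Example options object (values are illustrative):
 *
 *   const options: DashConvertOptions = {
 *     input: './video.mp4',
 *     outputDir: './output',
 *     codec: 'dual',
 *     format: 'both',
 *     customProfiles: ['480p', '720p', '1080p@60'],
 *     hardwareAccelerator: 'auto',
 *     quality: { h264: { crf: 23 }, av1: { crf: 35 } },
 *     onProgress: (p) => console.log(`${p.stage}: ${p.percent.toFixed(0)}%`)
 *   };
 */
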
/**
 * Video quality profile
 */
export interface VideoProfile {
  /** Profile name (e.g., "1080p", "720p") */
  name: string;

  /** Video width in pixels */
  width: number;

  /** Video height in pixels */
  height: number;

  /** Video bitrate (e.g., "5000k") */
  videoBitrate: string;

  /** Audio bitrate (e.g., "128k") */
  audioBitrate: string;

  /** Target FPS for this profile (default: 30) */
  fps?: number;
}

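/*
 * Example profile (values are illustrative):
 *
 *   const p1080: VideoProfile = {
 *     name: '1080p',
 *     width: 1920,
 *     height: 1080,
 *     videoBitrate: '5000k',
 *     audioBitrate: '128k',
 *     fps: 30
 *   };
 */
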
/**
 * Thumbnail sprite configuration
 */
export interface ThumbnailConfig {
  /** Width of each thumbnail (default: 160) */
  width?: number;

  /** Height of each thumbnail (default: 90) */
  height?: number;

  /** Interval between thumbnails in seconds (default: 1) */
  interval?: number;

  /** Number of thumbnails per row (default: 10) */
  columns?: number;
}

/**
 * Conversion progress information
 */
export interface ConversionProgress {
  /** Current stage of conversion */
  stage: 'analyzing' | 'encoding' | 'thumbnails' | 'manifest' | 'complete';

  /** Progress percentage (0-100) - overall progress */
  percent: number;

  /** Current profile being processed */
  currentProfile?: string;

  /** Progress percentage for current profile (0-100) */
  profilePercent?: number;

  /** Additional message */
  message?: string;
}

/**
 * Result of DASH conversion
 */
export interface DashConvertResult {
  /** Path to generated DASH manifest (if format is 'dash' or 'both') */
  manifestPath?: string;

  /** Path to generated HLS manifest (if format is 'hls' or 'both') */
  hlsManifestPath?: string;

  /** Paths to generated video segments */
  videoPaths: string[];

  /** Path to thumbnail sprite (if generated) */
  thumbnailSpritePath?: string;

  /** Path to thumbnail VTT file (if generated) */
  thumbnailVttPath?: string;

  /** Path to poster image (if generated) */
  posterPath?: string;

  /** Video duration in seconds */
  duration: number;

  /** Generated profiles */
  profiles: VideoProfile[];

  /** Whether NVENC was used */
  usedNvenc: boolean;

  /** Selected hardware accelerator */
  selectedAccelerator: HardwareAccelerator;

  /** Codec type used for encoding */
  codecType: CodecType;

  /** Streaming format generated */
  format: StreamingFormat;
}

/**
 * Video metadata
 */
export interface VideoMetadata {
  width: number;
  height: number;
  duration: number;
  fps: number;
  codec: string;
  hasAudio: boolean; // Whether the source has an audio track
  audioBitrate?: number; // Audio bitrate in kbps
  videoBitrate?: number; // Video bitrate in kbps
}

/**
 * Video optimizations (for future use)
 */
export interface VideoOptimizations {
  /** Apply deinterlacing */
  deinterlace?: boolean;

  /** Apply denoising filter */
  denoise?: boolean;

  /** Color correction / LUT file path */
  colorCorrection?: string;

  /** Audio normalization */
  audioNormalize?: boolean;

  /** Custom FFmpeg filters */
  customFilters?: string[];
}
13
src/utils/fs.ts
Normal file
@@ -0,0 +1,13 @@
import { mkdir, access, constants } from 'node:fs/promises';

/**
 * Ensure directory exists
 */
export async function ensureDir(dirPath: string): Promise<void> {
  try {
    await access(dirPath, constants.F_OK);
  } catch {
    await mkdir(dirPath, { recursive: true });
  }
}
23
src/utils/index.ts
Normal file
@@ -0,0 +1,23 @@
// System utilities
export {
  checkFFmpeg,
  checkMP4Box,
  checkNvenc,
  checkAV1Support,
  detectHardwareEncoders,
  execFFmpeg,
  execMP4Box,
  setLogFile
} from './system';

// Video utilities
export {
  getVideoMetadata,
  selectAudioBitrate,
  formatVttTime
} from './video';

// File system utilities
export {
  ensureDir
} from './fs';
261
src/utils/system.ts
Normal file
@@ -0,0 +1,261 @@
import { spawn } from 'node:child_process';
import { appendFile } from 'node:fs/promises';
import type { HardwareAccelerator, HardwareEncoderInfo } from '../types';

// Global variable for log file path
let currentLogFile: string | null = null;

/**
 * Set log file path for FFmpeg and MP4Box output
 */
export function setLogFile(logPath: string): void {
  currentLogFile = logPath;
}

/**
 * Append log entry to file
 */
async function appendLog(entry: string): Promise<void> {
  if (currentLogFile) {
    try {
      await appendFile(currentLogFile, entry, 'utf-8');
    } catch (err) {
      // Silently ignore log errors to not break conversion
    }
  }
}

/**
 * Check if FFmpeg is available
 */
export async function checkFFmpeg(): Promise<boolean> {
  return new Promise((resolve) => {
    const proc = spawn('ffmpeg', ['-version']);
    proc.on('error', () => resolve(false));
    proc.on('close', (code) => resolve(code === 0));
  });
}

/**
 * Check if MP4Box is available
 */
export async function checkMP4Box(): Promise<boolean> {
  return new Promise((resolve) => {
    const proc = spawn('MP4Box', ['-version']);
    proc.on('error', () => resolve(false));
    proc.on('close', (code) => resolve(code === 0));
  });
}

/**
 * Check if Bento4 mp4dash is available
 */
export async function checkBento4(): Promise<boolean> {
  return new Promise((resolve) => {
    const proc = spawn('mp4dash', ['--version']);
    proc.on('error', () => resolve(false));
    proc.on('close', (code) => resolve(code === 0));
  });
}

/**
 * Check if NVENC is available
 */
export async function checkNvenc(): Promise<boolean> {
  return new Promise((resolve) => {
    const proc = spawn('ffmpeg', ['-hide_banner', '-encoders']);
    let output = '';

    proc.stdout.on('data', (data) => {
      output += data.toString();
    });

    proc.on('error', () => resolve(false));
    proc.on('close', (code) => {
      if (code !== 0) {
        resolve(false);
      } else {
        resolve(output.includes('h264_nvenc') || output.includes('hevc_nvenc'));
      }
    });
  });
}

/**
 * Check if AV1 hardware encoding is available
 * Supports: NVENC (RTX 40xx), QSV (Intel 11+), AMF (AMD RX 7000)
 */
export async function checkAV1Support(): Promise<{
  available: boolean;
  encoder?: 'av1_nvenc' | 'av1_qsv' | 'av1_amf';
}> {
  return new Promise((resolve) => {
    const proc = spawn('ffmpeg', ['-hide_banner', '-encoders']);
    let output = '';

    proc.stdout.on('data', (data) => {
      output += data.toString();
    });

    proc.on('error', () => resolve({ available: false }));
    proc.on('close', (code) => {
      if (code !== 0) {
        resolve({ available: false });
      } else {
        // Check for hardware AV1 encoders in order of preference
        if (output.includes('av1_nvenc')) {
          resolve({ available: true, encoder: 'av1_nvenc' });
        } else if (output.includes('av1_qsv')) {
          resolve({ available: true, encoder: 'av1_qsv' });
        } else if (output.includes('av1_amf')) {
          resolve({ available: true, encoder: 'av1_amf' });
        } else {
          resolve({ available: false });
        }
      }
    });
  });
}

/**
 * Get the list of available hardware encoders (based on `ffmpeg -encoders` output)
 */
export async function detectHardwareEncoders(): Promise<HardwareEncoderInfo[]> {
  const encodersOutput: string = await new Promise((resolve) => {
    const proc = spawn('ffmpeg', ['-hide_banner', '-encoders']);
    let output = '';

    proc.stdout.on('data', (data) => {
      output += data.toString();
    });

    proc.on('error', () => resolve(''));
    proc.on('close', () => resolve(output));
  });

  const has = (name: string) => encodersOutput.includes(name);

  const detected: HardwareEncoderInfo[] = [];

  const accelerators: Array<{ acc: HardwareAccelerator; h264?: string; av1?: string }> = [
    { acc: 'nvenc', h264: has('h264_nvenc') ? 'h264_nvenc' : undefined, av1: has('av1_nvenc') ? 'av1_nvenc' : undefined },
    { acc: 'qsv', h264: has('h264_qsv') ? 'h264_qsv' : undefined, av1: has('av1_qsv') ? 'av1_qsv' : undefined },
    { acc: 'amf', h264: has('h264_amf') ? 'h264_amf' : undefined, av1: has('av1_amf') ? 'av1_amf' : undefined },
    { acc: 'vaapi', h264: has('h264_vaapi') ? 'h264_vaapi' : undefined, av1: has('av1_vaapi') ? 'av1_vaapi' : undefined },
    { acc: 'videotoolbox', h264: has('h264_videotoolbox') ? 'h264_videotoolbox' : undefined, av1: has('av1_videotoolbox') ? 'av1_videotoolbox' : undefined },
    { acc: 'v4l2', h264: has('h264_v4l2m2m') ? 'h264_v4l2m2m' : undefined, av1: has('av1_v4l2m2m') ? 'av1_v4l2m2m' : undefined }
  ];

  for (const item of accelerators) {
    if (item.h264 || item.av1) {
      detected.push({
        accelerator: item.acc,
        h264Encoder: item.h264,
        av1Encoder: item.av1
      });
    }
  }

  return detected;
}

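/*
 * Example result on a machine whose FFmpeg build exposes h264_nvenc and av1_nvenc
 * (purely illustrative; the actual list depends on the local FFmpeg build and hardware):
 *
 *   [{ accelerator: 'nvenc', h264Encoder: 'h264_nvenc', av1Encoder: 'av1_nvenc' }]
 */
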
/**
 * Execute FFmpeg command with progress tracking
 */
export async function execFFmpeg(
  args: string[],
  onProgress?: (percent: number) => void,
  duration?: number
): Promise<void> {
  const timestamp = new Date().toISOString();
  const commandLog = `\n=== FFmpeg Command [${timestamp}] ===\nffmpeg ${args.join(' ')}\n`;
  await appendLog(commandLog);

  return new Promise((resolve, reject) => {
    const proc = spawn('ffmpeg', args);

    let stderrData = '';

    proc.stderr.on('data', (data) => {
      const text = data.toString();
      stderrData += text;

      if (onProgress && duration) {
        // Parse time from FFmpeg output: time=00:01:23.45
        const timeMatch = text.match(/time=(\d{2}):(\d{2}):(\d{2}\.\d{2})/);
        if (timeMatch) {
          const hours = parseInt(timeMatch[1]);
          const minutes = parseInt(timeMatch[2]);
          const seconds = parseFloat(timeMatch[3]);
          const currentTime = hours * 3600 + minutes * 60 + seconds;
          const percent = Math.min(100, (currentTime / duration) * 100);
          onProgress(percent);
        }
      }
    });

    proc.on('error', (err) => {
      appendLog(`ERROR: ${err.message}\n`);
      reject(new Error(`FFmpeg error: ${err.message}`));
    });

    proc.on('close', (code) => {
      if (code === 0) {
        // Log last 10 lines of output for successful runs
        const lines = stderrData.split('\n').filter(l => l.trim());
        const lastLines = lines.slice(-10).join('\n');
        appendLog(`SUCCESS: Exit code ${code}\n--- Last 10 lines of output ---\n${lastLines}\n`);
        resolve();
      } else {
        // Log full output on failure
        appendLog(`FAILED: Exit code ${code}\n--- Full error output ---\n${stderrData}\n`);
        reject(new Error(`FFmpeg failed with exit code ${code}\n${stderrData}`));
      }
    });
  });
}

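/*
 * Illustrative usage (arguments and paths are hypothetical; the duration argument
 * typically comes from getVideoMetadata()):
 *
 *   await execFFmpeg(
 *     ['-i', 'input.mp4', '-c:v', 'libx264', '-y', 'out.mp4'],
 *     (percent) => console.log(`encoding: ${percent.toFixed(1)}%`),
 *     metadata.duration
 *   );
 */
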
/**
 * Execute MP4Box command
 */
export async function execMP4Box(args: string[]): Promise<void> {
  const timestamp = new Date().toISOString();
  const commandLog = `\n=== MP4Box Command [${timestamp}] ===\nMP4Box ${args.join(' ')}\n`;
  await appendLog(commandLog);

  return new Promise((resolve, reject) => {
    const proc = spawn('MP4Box', args);

    let stdoutData = '';
    let stderrData = '';

    proc.stdout.on('data', (data) => {
      stdoutData += data.toString();
    });

    proc.stderr.on('data', (data) => {
      stderrData += data.toString();
    });

    proc.on('error', (err) => {
      appendLog(`ERROR: ${err.message}\n`);
      reject(new Error(`MP4Box error: ${err.message}`));
    });

    proc.on('close', (code) => {
      if (code === 0) {
        // Log output summary for successful runs
        const output = stdoutData || stderrData;
        const lines = output.split('\n').filter(l => l.trim());
        const lastLines = lines.slice(-10).join('\n');
        appendLog(`SUCCESS: Exit code ${code}\n--- Last 10 lines of output ---\n${lastLines}\n`);
        resolve();
      } else {
        // Log full output on failure
        const output = stderrData || stdoutData;
        appendLog(`FAILED: Exit code ${code}\n--- Full error output ---\n${output}\n`);
        reject(new Error(`MP4Box failed with exit code ${code}\n${output}`));
      }
    });
  });
}
118
src/utils/video.ts
Normal file
@@ -0,0 +1,118 @@
import { spawn } from 'node:child_process';
import type { VideoMetadata } from '../types';

/**
 * Get video metadata using ffprobe
 */
export async function getVideoMetadata(inputPath: string): Promise<VideoMetadata> {
  return new Promise((resolve, reject) => {
    const proc = spawn('ffprobe', [
      '-v', 'error',
      '-show_entries', 'stream=width,height,duration,r_frame_rate,codec_name,codec_type,bit_rate',
      '-show_entries', 'format=duration',
      '-of', 'json',
      inputPath
    ]);

    let output = '';

    proc.stdout.on('data', (data) => {
      output += data.toString();
    });

    proc.on('error', (err) => {
      reject(new Error(`ffprobe error: ${err.message}`));
    });

    proc.on('close', (code) => {
      if (code !== 0) {
        reject(new Error(`ffprobe failed with exit code ${code}`));
        return;
      }

      try {
        const data = JSON.parse(output);

        const videoStream = data.streams.find((s: any) => s.codec_type === 'video');
        const audioStream = data.streams.find((s: any) => s.codec_type === 'audio');
        const format = data.format;

        if (!videoStream) {
          reject(new Error('No video stream found in input file'));
          return;
        }

        // Parse frame rate (handle missing or malformed r_frame_rate)
        let fps = 30; // default fallback
        if (videoStream.r_frame_rate) {
          const [num, den] = videoStream.r_frame_rate.split('/').map(Number);
          if (num && den && den !== 0) {
            fps = num / den;
          }
        }

        // Get duration from stream or format
        const duration = parseFloat(videoStream.duration || format.duration || '0');

        // Get audio bitrate in kbps
        const audioBitrateSource = data.streams.find((s: any) => s.codec_type === 'audio' && s.bit_rate);
        const audioBitrate = audioBitrateSource?.bit_rate
          ? Math.round(parseInt(audioBitrateSource.bit_rate) / 1000)
          : undefined;

        // Get video bitrate in kbps
        const videoBitrate = videoStream.bit_rate
          ? Math.round(parseInt(videoStream.bit_rate) / 1000)
          : undefined;

        resolve({
          width: videoStream.width,
          height: videoStream.height,
          duration,
          fps,
          codec: videoStream.codec_name,
          hasAudio: Boolean(audioStream),
          audioBitrate,
          videoBitrate
        });
      } catch (err) {
        reject(new Error(`Failed to parse ffprobe output: ${err}`));
      }
    });
  });
}

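/*
 * Example resolved value for a 1080p30 H.264 source with stereo AAC audio
 * (numbers are illustrative):
 *
 *   { width: 1920, height: 1080, duration: 123.4, fps: 30, codec: 'h264',
 *     hasAudio: true, audioBitrate: 192, videoBitrate: 8000 }
 */
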
/**
 * Select optimal audio bitrate based on source
 * Don't upscale audio quality - use min of source and target
 */
export function selectAudioBitrate(
  sourceAudioBitrate: number | undefined,
  targetBitrate: number = 256
): string {
  if (!sourceAudioBitrate) {
    // If we can't detect source bitrate, use target
    return `${targetBitrate}k`;
  }

  // Use minimum of source and target (no upscaling)
  const optimalBitrate = Math.min(sourceAudioBitrate, targetBitrate);

  // Round to common bitrate values for consistency
  if (optimalBitrate <= 64) return '64k';
  if (optimalBitrate <= 96) return '96k';
  if (optimalBitrate <= 128) return '128k';
  if (optimalBitrate <= 192) return '192k';
  return '256k';
}

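/*
 * Examples:
 *   selectAudioBitrate(undefined)  returns '256k'  (source bitrate unknown, use target)
 *   selectAudioBitrate(320, 256)   returns '256k'  (capped at target)
 *   selectAudioBitrate(112)        returns '128k'  (rounded to the nearest common value)
 */
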
/**
 * Format time for VTT file (HH:MM:SS.mmm)
 */
export function formatVttTime(seconds: number): string {
  const hours = Math.floor(seconds / 3600);
  const minutes = Math.floor((seconds % 3600) / 60);
  const secs = seconds % 60;

  return `${String(hours).padStart(2, '0')}:${String(minutes).padStart(2, '0')}:${secs.toFixed(3).padStart(6, '0')}`;
}

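/*
 * Examples:
 *   formatVttTime(0)        returns '00:00:00.000'
 *   formatVttTime(83.5)     returns '00:01:23.500'
 *   formatVttTime(3661.25)  returns '01:01:01.250'
 */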