diff --git a/server/src/interfaces/media.interface.ts b/server/src/interfaces/media.interface.ts
index 468a6ad88d..b90dfb483c 100644
--- a/server/src/interfaces/media.interface.ts
+++ b/server/src/interfaces/media.interface.ts
@@ -130,6 +130,11 @@ export interface ProbeOptions {
   countFrames: boolean;
 }
 
+export interface VideoInterfaces {
+  dri: string[];
+  mali: boolean;
+}
+
 export interface IMediaRepository {
   // image
   extract(input: string, output: string): Promise<boolean>;
diff --git a/server/src/services/media.service.spec.ts b/server/src/services/media.service.spec.ts
index 909b9d02e3..36a9045677 100644
--- a/server/src/services/media.service.spec.ts
+++ b/server/src/services/media.service.spec.ts
@@ -1,4 +1,3 @@
-import type { Stats } from 'node:fs';
 import { SystemConfig } from 'src/config';
 import { AssetEntity } from 'src/entities/asset.entity';
 import { ExifEntity } from 'src/entities/exif.entity';
@@ -303,7 +302,7 @@ describe(MediaService.name, () => {
     it('should skip video thumbnail generation if no video stream', async () => {
       mediaMock.probe.mockResolvedValue(probeStub.noVideoStreams);
       assetMock.getById.mockResolvedValue(assetStub.video);
-      await expect(sut.handleGenerateThumbnails({ id: assetStub.video.id })).rejects.toBeInstanceOf(Error);
+      await expect(sut.handleGenerateThumbnails({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.generateThumbnail).not.toHaveBeenCalled();
       expect(assetMock.update).not.toHaveBeenCalledWith();
     });
@@ -770,6 +769,7 @@ describe(MediaService.name, () => {
   describe('handleVideoConversion', () => {
     beforeEach(() => {
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
+      sut.videoInterfaces = { dri: ['renderD128'], mali: true };
     });
 
     it('should skip transcoding if asset not found', async () => {
@@ -826,7 +826,7 @@ describe(MediaService.name, () => {
       systemMock.get.mockResolvedValue({ ffmpeg: { transcode: 'foo' } } as never as SystemConfig);
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
 
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toBeDefined();
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });
 
@@ -1079,7 +1079,7 @@ describe(MediaService.name, () => {
       mediaMock.probe.mockResolvedValue(probeStub.videoStream2160p);
       systemMock.get.mockResolvedValue({ ffmpeg: { transcode: 'invalid' as any } });
 
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrow();
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });
 
@@ -1434,7 +1434,7 @@ describe(MediaService.name, () => {
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, targetVideoCodec: VideoCodec.VP9 } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });
 
@@ -1442,7 +1442,7 @@ describe(MediaService.name, () => {
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: 'invalid' as any } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });
 
@@ -1628,7 +1628,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set options for qsv', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, maxBitrate: '10000k' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1664,7 +1663,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set options for qsv with custom dri node', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: {
@@ -1690,7 +1688,6 @@ describe(MediaService.name, () => {
     });
 
     it('should omit preset for qsv if invalid', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, preset: 'invalid' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1710,7 +1707,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set low power mode for qsv if target video codec is vp9', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, targetVideoCodec: VideoCodec.VP9 } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1730,17 +1726,18 @@ describe(MediaService.name, () => {
     });
 
     it('should fail for qsv if no hw devices', async () => {
-      storageMock.readdir.mockRejectedValue(new Error('Could not read directory'));
+      sut.videoInterfaces = { dri: [], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
+
       expect(mediaMock.transcode).not.toHaveBeenCalled();
-      expect(loggerMock.debug).toHaveBeenCalledWith('No devices found in /dev/dri.');
     });
 
     it('should prefer higher index renderD* device for qsv', async () => {
-      storageMock.readdir.mockResolvedValue(['card1', 'renderD129', 'card0', 'renderD128']);
+      sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1760,7 +1757,6 @@ describe(MediaService.name, () => {
     });
 
     it('should use hardware decoding for qsv if enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
@@ -1790,7 +1786,6 @@ describe(MediaService.name, () => {
     });
 
     it('should use hardware tone-mapping for qsv if hardware decoding is enabled and should tone map', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
@@ -1820,7 +1815,7 @@ describe(MediaService.name, () => {
     });
 
     it('should use preferred device for qsv when hardware decoding', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128', 'renderD129', 'renderD130']);
+      sut.videoInterfaces = { dri: ['renderD128', 'renderD129', 'renderD130'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true, preferredHwDevice: 'renderD129' },
@@ -1840,7 +1835,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set format to nv12 for qsv if input is not yuv420p', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStream10Bit);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
@@ -1866,7 +1860,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set options for vaapi', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1898,7 +1891,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set vbr options for vaapi when max bitrate is enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, maxBitrate: '10000k' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1924,7 +1916,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set cq options for vaapi when max bitrate is disabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1950,7 +1941,6 @@ describe(MediaService.name, () => {
     });
 
     it('should omit preset for vaapi if invalid', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, preset: 'invalid' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1970,7 +1960,7 @@ describe(MediaService.name, () => {
     });
 
     it('should prefer higher index renderD* device for vaapi', async () => {
-      storageMock.readdir.mockResolvedValue(['card1', 'renderD129', 'card0', 'renderD128']);
+      sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1990,7 +1980,7 @@ describe(MediaService.name, () => {
     });
 
     it('should select specific gpu node if selected', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD129', 'card1', 'card0', 'renderD128']);
+      sut.videoInterfaces = { dri: ['renderD129', 'card1', 'card0', 'renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, preferredHwDevice: '/dev/dri/renderD128' },
@@ -2012,7 +2002,6 @@ describe(MediaService.name, () => {
     });
 
     it('should use hardware decoding for vaapi if enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
@@ -2041,7 +2030,6 @@ describe(MediaService.name, () => {
     });
 
     it('should use hardware tone-mapping for vaapi if hardware decoding is enabled and should tone map', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
@@ -2066,7 +2054,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set format to nv12 for vaapi if input is not yuv420p', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStream10Bit);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
@@ -2087,7 +2074,7 @@ describe(MediaService.name, () => {
     });
 
     it('should use preferred device for vaapi when hardware decoding', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128', 'renderD129', 'renderD130']);
+      sut.videoInterfaces = { dri: ['renderD128', 'renderD129', 'renderD130'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true, preferredHwDevice: 'renderD129' },
@@ -2106,8 +2093,47 @@ describe(MediaService.name, () => {
       );
     });
 
-    it('should fallback to sw transcoding if hw transcoding fails', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
+    it('should fallback to hw encoding and sw decoding if hw transcoding fails and hw decoding is enabled', async () => {
+      mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
+      systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
+      assetMock.getByIds.mockResolvedValue([assetStub.video]);
+      mediaMock.transcode.mockRejectedValueOnce(new Error('error'));
+      await sut.handleVideoConversion({ id: assetStub.video.id });
+      expect(mediaMock.transcode).toHaveBeenCalledTimes(2);
+      expect(mediaMock.transcode).toHaveBeenLastCalledWith(
+        '/original/path.ext',
+        'upload/encoded-video/user-id/as/se/asset-id.mp4',
+        expect.objectContaining({
+          inputOptions: expect.arrayContaining([
+            '-init_hw_device vaapi=accel:/dev/dri/renderD128',
+            '-filter_hw_device accel',
+          ]),
+          outputOptions: expect.arrayContaining([`-c:v h264_vaapi`]),
+          twoPass: false,
+        }),
+      );
+    });
+
+    it('should fallback to sw decoding if fallback to sw decoding + hw encoding fails', async () => {
+      mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
+      systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
+      assetMock.getByIds.mockResolvedValue([assetStub.video]);
+      mediaMock.transcode.mockRejectedValueOnce(new Error('error'));
+      mediaMock.transcode.mockRejectedValueOnce(new Error('error'));
+      await sut.handleVideoConversion({ id: assetStub.video.id });
+      expect(mediaMock.transcode).toHaveBeenCalledTimes(3);
+      expect(mediaMock.transcode).toHaveBeenLastCalledWith(
+        '/original/path.ext',
+        'upload/encoded-video/user-id/as/se/asset-id.mp4',
+        expect.objectContaining({
+          inputOptions: expect.any(Array),
+          outputOptions: expect.arrayContaining(['-c:v h264']),
+          twoPass: false,
+        }),
+      );
+    });
+
+    it('should fallback to sw transcoding if hw transcoding fails and hw decoding is disabled', async () => {
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -2126,17 +2152,15 @@ describe(MediaService.name, () => {
     });
 
     it('should fail for vaapi if no hw devices', async () => {
-      storageMock.readdir.mockResolvedValue([]);
+      sut.videoInterfaces = { dri: [], mali: true };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });
 
     it('should set options for rkmpp', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -2171,8 +2195,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set vbr options for rkmpp when max bitrate is enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamVp9);
       systemMock.get.mockResolvedValue({
         ffmpeg: {
@@ -2196,8 +2218,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set cqp options for rkmpp when max bitrate is disabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
@@ -2216,8 +2236,6 @@ describe(MediaService.name, () => {
     });
 
     it('should set OpenCL tonemapping options for rkmpp when OpenCL is available', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
@@ -2240,8 +2258,7 @@ describe(MediaService.name, () => {
     });
 
     it('should set hardware decoding options for rkmpp when hardware decoding is enabled with no OpenCL on non-HDR file', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => false, isCharacterDevice: () => false } as Stats);
+      sut.videoInterfaces = { dri: ['renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.noAudioStreams);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
@@ -2262,8 +2279,6 @@ describe(MediaService.name, () => {
     });
 
     it('should use software decoding and tone-mapping if hardware decoding is disabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: false, crf: 30, maxBitrate: '0' },
@@ -2286,8 +2301,7 @@ describe(MediaService.name, () => {
     });
 
     it('should use software tone-mapping if opencl is not available', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => false, isCharacterDevice: () => false } as Stats);
+      sut.videoInterfaces = { dri: ['renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
diff --git a/server/src/services/media.service.ts b/server/src/services/media.service.ts
index f433748ec4..7036bd32e8 100644
--- a/server/src/services/media.service.ts
+++ b/server/src/services/media.service.ts
@@ -1,7 +1,7 @@
 import { Injectable } from '@nestjs/common';
 import { dirname } from 'node:path';
 import { StorageCore } from 'src/cores/storage.core';
-import { OnJob } from 'src/decorators';
+import { OnEvent, OnJob } from 'src/decorators';
 import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
 import { AssetEntity } from 'src/entities/asset.entity';
 import {
@@ -27,7 +27,7 @@ import {
   JobStatus,
   QueueName,
 } from 'src/interfaces/job.interface';
-import { AudioStreamInfo, TranscodeCommand, VideoFormat, VideoStreamInfo } from 'src/interfaces/media.interface';
+import { AudioStreamInfo, VideoFormat, VideoInterfaces, VideoStreamInfo } from 'src/interfaces/media.interface';
 import { BaseService } from 'src/services/base.service';
 import { getAssetFiles } from 'src/utils/asset.util';
 import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
@@ -36,8 +36,13 @@ import { usePagination } from 'src/utils/pagination';
 
 @Injectable()
 export class MediaService extends BaseService {
-  private maliOpenCL?: boolean;
-  private devices?: string[];
+  videoInterfaces: VideoInterfaces = { dri: [], mali: false };
+
+  @OnEvent({ name: 'app.bootstrap' })
+  async onBootstrap() {
+    const [dri, mali] = await Promise.all([this.getDevices(), this.hasMaliOpenCL()]);
+    this.videoInterfaces = { dri, mali };
+  }
 
   @OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
   async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
@@ -300,19 +305,19 @@ export class MediaService extends BaseService {
     const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
       countFrames: this.logger.isLevelEnabled(LogLevel.DEBUG), // makes frame count more reliable for progress logs
     });
-    const mainVideoStream = this.getMainStream(videoStreams);
-    const mainAudioStream = this.getMainStream(audioStreams);
-    if (!mainVideoStream || !format.formatName) {
+    const videoStream = this.getMainStream(videoStreams);
+    const audioStream = this.getMainStream(audioStreams);
+    if (!videoStream || !format.formatName) {
       return JobStatus.FAILED;
     }
 
-    if (!mainVideoStream.height || !mainVideoStream.width) {
+    if (!videoStream.height || !videoStream.width) {
       this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
       return JobStatus.FAILED;
     }
 
-    const { ffmpeg } = await this.getConfig({ withCache: true });
-    const target = this.getTranscodeTarget(ffmpeg, mainVideoStream, mainAudioStream);
+    let { ffmpeg } = await this.getConfig({ withCache: true });
+    const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
     if (target === TranscodeTarget.NONE && !this.isRemuxRequired(ffmpeg, format)) {
       if (asset.encodedVideoPath) {
         this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
@@ -325,15 +330,7 @@ export class MediaService extends BaseService {
       return JobStatus.SKIPPED;
     }
 
-    let command: TranscodeCommand;
-    try {
-      const config = BaseConfig.create(ffmpeg, await this.getDevices(), await this.hasMaliOpenCL());
-      command = config.getCommand(target, mainVideoStream, mainAudioStream);
-    } catch (error) {
-      this.logger.error(`An error occurred while configuring transcoding options: ${error}`);
-      return JobStatus.FAILED;
-    }
-
+    const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
     if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
       this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
     } else {
@@ -354,8 +351,8 @@ export class MediaService extends BaseService {
       if (ffmpeg.accelDecode) {
         try {
           this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and software decoding`);
-          const config = BaseConfig.create({ ...ffmpeg, accelDecode: false });
-          command = config.getCommand(target, mainVideoStream, mainAudioStream);
+          ffmpeg = { ...ffmpeg, accelDecode: false };
+          const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
           await this.mediaRepository.transcode(input, output, command);
           partialFallbackSuccess = true;
         } catch (error: any) {
@@ -365,8 +362,8 @@ export class MediaService extends BaseService {
 
       if (!partialFallbackSuccess) {
         this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
-        const config = BaseConfig.create({ ...ffmpeg, accel: TranscodeHWAccel.DISABLED });
-        command = config.getCommand(target, mainVideoStream, mainAudioStream);
+        ffmpeg = { ...ffmpeg, accel: TranscodeHWAccel.DISABLED };
+        const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
         await this.mediaRepository.transcode(input, output, command);
       }
     }
@@ -507,30 +504,24 @@ export class MediaService extends BaseService {
   }
 
   private async getDevices() {
-    if (!this.devices) {
-      try {
-        this.devices = await this.storageRepository.readdir('/dev/dri');
-      } catch {
-        this.logger.debug('No devices found in /dev/dri.');
-        this.devices = [];
-      }
+    try {
+      return await this.storageRepository.readdir('/dev/dri');
+    } catch {
+      this.logger.debug('No devices found in /dev/dri.');
+      return [];
     }
-
-    return this.devices;
   }
 
   private async hasMaliOpenCL() {
-    if (this.maliOpenCL === undefined) {
-      try {
-        const maliIcdStat = await this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd');
-        const maliDeviceStat = await this.storageRepository.stat('/dev/mali0');
-        this.maliOpenCL = maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
-      } catch {
-        this.logger.debug('OpenCL not available for transcoding, so RKMPP acceleration will use CPU tonemapping');
-        this.maliOpenCL = false;
-      }
+    try {
+      const [maliIcdStat, maliDeviceStat] = await Promise.all([
+        this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd'),
+        this.storageRepository.stat('/dev/mali0'),
+      ]);
+      return maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
+    } catch {
+      this.logger.debug('OpenCL not available for transcoding, so RKMPP acceleration will use CPU tonemapping');
+      return false;
     }
-
-    return this.maliOpenCL;
   }
 }
diff --git a/server/src/utils/media.ts b/server/src/utils/media.ts
index 226f95b4bb..678e8cb15a 100644
--- a/server/src/utils/media.ts
+++ b/server/src/utils/media.ts
@@ -7,6 +7,7 @@ import {
   VideoCodecHWConfig,
   VideoCodecSWConfig,
   VideoFormat,
+  VideoInterfaces,
   VideoStreamInfo,
 } from 'src/interfaces/media.interface';
 
@@ -14,11 +15,11 @@ export class BaseConfig implements VideoCodecSWConfig {
   readonly presets = ['veryslow', 'slower', 'slow', 'medium', 'fast', 'faster', 'veryfast', 'superfast', 'ultrafast'];
   protected constructor(protected config: SystemConfigFFmpegDto) {}
 
-  static create(config: SystemConfigFFmpegDto, devices: string[] = [], hasMaliOpenCL = false): VideoCodecSWConfig {
+  static create(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces): VideoCodecSWConfig {
     if (config.accel === TranscodeHWAccel.DISABLED) {
       return this.getSWCodecConfig(config);
     }
-    return this.getHWCodecConfig(config, devices, hasMaliOpenCL);
+    return this.getHWCodecConfig(config, interfaces);
   }
 
   private static getSWCodecConfig(config: SystemConfigFFmpegDto) {
@@ -41,27 +42,31 @@ export class BaseConfig implements VideoCodecSWConfig {
     }
   }
 
-  private static getHWCodecConfig(config: SystemConfigFFmpegDto, devices: string[] = [], hasMaliOpenCL = false) {
+  private static getHWCodecConfig(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces) {
     let handler: VideoCodecHWConfig;
     switch (config.accel) {
       case TranscodeHWAccel.NVENC: {
-        handler = config.accelDecode ? new NvencHwDecodeConfig(config) : new NvencSwDecodeConfig(config);
+        handler = config.accelDecode
+          ? new NvencHwDecodeConfig(config, interfaces)
+          : new NvencSwDecodeConfig(config, interfaces);
         break;
       }
       case TranscodeHWAccel.QSV: {
-        handler = config.accelDecode ? new QsvHwDecodeConfig(config, devices) : new QsvSwDecodeConfig(config, devices);
+        handler = config.accelDecode
+          ? new QsvHwDecodeConfig(config, interfaces)
+          : new QsvSwDecodeConfig(config, interfaces);
         break;
       }
      case TranscodeHWAccel.VAAPI: {
         handler = config.accelDecode
-          ? new VaapiHwDecodeConfig(config, devices)
-          : new VaapiSwDecodeConfig(config, devices);
+          ? new VaapiHwDecodeConfig(config, interfaces)
+          : new VaapiSwDecodeConfig(config, interfaces);
         break;
       }
       case TranscodeHWAccel.RKMPP: {
         handler = config.accelDecode
-          ? new RkmppHwDecodeConfig(config, devices, hasMaliOpenCL)
-          : new RkmppSwDecodeConfig(config, devices);
+          ? new RkmppHwDecodeConfig(config, interfaces)
+          : new RkmppSwDecodeConfig(config, interfaces);
         break;
       }
       default: {
@@ -323,13 +328,15 @@ export class BaseConfig implements VideoCodecSWConfig {
 
 export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
   protected device: string;
+  protected interfaces: VideoInterfaces;
 
   constructor(
     protected config: SystemConfigFFmpegDto,
-    devices: string[] = [],
+    interfaces: VideoInterfaces,
   ) {
     super(config);
-    this.device = this.getDevice(devices);
+    this.interfaces = interfaces;
+    this.device = this.getDevice(interfaces);
   }
 
   getSupportedCodecs() {
@@ -346,16 +353,16 @@ export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
     });
   }
 
-  getDevice(devices: string[]) {
+  getDevice({ dri }: VideoInterfaces) {
     if (this.config.preferredHwDevice === 'auto') {
       // eslint-disable-next-line unicorn/no-array-reduce
-      return `/dev/dri/${this.validateDevices(devices).reduce(function (a, b) {
+      return `/dev/dri/${this.validateDevices(dri).reduce(function (a, b) {
         return a.localeCompare(b) < 0 ? b : a;
       })}`;
     }
 
     const deviceName = this.config.preferredHwDevice.replace('/dev/dri/', '');
-    if (!devices.includes(deviceName)) {
+    if (!dri.includes(deviceName)) {
       throw new Error(`Device '${deviceName}' does not exist. If using Docker, make sure this device is mounted`);
     }
 
@@ -886,13 +893,6 @@ export class VaapiHwDecodeConfig extends VaapiSwDecodeConfig {
 }
 
 export class RkmppSwDecodeConfig extends BaseHWConfig {
-  constructor(
-    protected config: SystemConfigFFmpegDto,
-    devices: string[] = [],
-  ) {
-    super(config, devices);
-  }
-
   eligibleForTwoPass(): boolean {
     return false;
   }
@@ -937,16 +937,6 @@ export class RkmppSwDecodeConfig extends BaseHWConfig {
 }
 
 export class RkmppHwDecodeConfig extends RkmppSwDecodeConfig {
-  protected hasMaliOpenCL: boolean;
-  constructor(
-    protected config: SystemConfigFFmpegDto,
-    devices: string[] = [],
-    hasMaliOpenCL = false,
-  ) {
-    super(config, devices);
-    this.hasMaliOpenCL = hasMaliOpenCL;
-  }
-
   getBaseInputOptions() {
     return ['-hwaccel rkmpp', '-hwaccel_output_format drm_prime', '-afbc rga', '-noautorotate'];
   }
@@ -954,7 +944,7 @@ export class RkmppHwDecodeConfig extends RkmppSwDecodeConfig {
   getFilterOptions(videoStream: VideoStreamInfo) {
     if (this.shouldToneMap(videoStream)) {
       const { primaries, transfer, matrix } = this.getColors();
-      if (this.hasMaliOpenCL) {
+      if (this.interfaces.mali) {
         return [
           // use RKMPP for scaling, OpenCL for tone mapping
           `scale_rkrga=${this.getScaling(videoStream)}:format=p010:afbc=1:async_depth=4`,