fix(server): partial fallback for hardware transcoding (#14611)
parent 1ba622adc9
commit 60c783bbe9
4 changed files with 121 additions and 121 deletions
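
In short, device discovery now happens once at startup (a readdir of /dev/dri plus a check for Mali OpenCL) and is stored on the service as videoInterfaces, and a failed hardware transcode is retried in stages instead of being reported as a configuration failure. Below is a minimal sketch of that retry ladder, using the types visible in the diff; the wrapper function itself is illustrative and not part of the commit.

// Sketch only: the attempt order for a hardware-accelerated transcode.
// SystemConfigFFmpegDto and TranscodeHWAccel are the types shown in the diff below;
// run() stands in for building a command via BaseConfig.create(config, videoInterfaces)
// and passing it to mediaRepository.transcode.
async function transcodeWithFallback(
  ffmpeg: SystemConfigFFmpegDto,
  run: (config: SystemConfigFFmpegDto) => Promise<void>,
): Promise<void> {
  const attempts: SystemConfigFFmpegDto[] = [
    ffmpeg, // 1. as configured: hw encode, plus hw decode when accelDecode is enabled
    ...(ffmpeg.accelDecode ? [{ ...ffmpeg, accelDecode: false }] : []), // 2. partial fallback: sw decode + hw encode
    { ...ffmpeg, accel: TranscodeHWAccel.DISABLED }, // 3. last resort: full software transcode
  ];
  let lastError: unknown;
  for (const config of attempts) {
    try {
      return await run(config);
    } catch (error) {
      lastError = error; // fall through to the next, less accelerated attempt
    }
  }
  throw lastError; // every attempt failed, so the job fails
}
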
@@ -130,6 +130,11 @@ export interface ProbeOptions {
   countFrames: boolean;
 }
 
+export interface VideoInterfaces {
+  dri: string[];
+  mali: boolean;
+}
+
 export interface IMediaRepository {
   // image
   extract(input: string, output: string): Promise<boolean>;

@@ -1,4 +1,3 @@
-import type { Stats } from 'node:fs';
 import { SystemConfig } from 'src/config';
 import { AssetEntity } from 'src/entities/asset.entity';
 import { ExifEntity } from 'src/entities/exif.entity';
@@ -303,7 +302,7 @@ describe(MediaService.name, () => {
     it('should skip video thumbnail generation if no video stream', async () => {
       mediaMock.probe.mockResolvedValue(probeStub.noVideoStreams);
       assetMock.getById.mockResolvedValue(assetStub.video);
-      await expect(sut.handleGenerateThumbnails({ id: assetStub.video.id })).rejects.toBeInstanceOf(Error);
+      await expect(sut.handleGenerateThumbnails({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.generateThumbnail).not.toHaveBeenCalled();
       expect(assetMock.update).not.toHaveBeenCalledWith();
     });
@@ -770,6 +769,7 @@ describe(MediaService.name, () => {
   describe('handleVideoConversion', () => {
     beforeEach(() => {
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
+      sut.videoInterfaces = { dri: ['renderD128'], mali: true };
     });

     it('should skip transcoding if asset not found', async () => {
@@ -826,7 +826,7 @@ describe(MediaService.name, () => {
       systemMock.get.mockResolvedValue({ ffmpeg: { transcode: 'foo' } } as never as SystemConfig);
       assetMock.getByIds.mockResolvedValue([assetStub.video]);

-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toBeDefined();
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });

@@ -1079,7 +1079,7 @@ describe(MediaService.name, () => {
       mediaMock.probe.mockResolvedValue(probeStub.videoStream2160p);
       systemMock.get.mockResolvedValue({ ffmpeg: { transcode: 'invalid' as any } });

-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrow();
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });

@@ -1434,7 +1434,7 @@ describe(MediaService.name, () => {
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.NVENC, targetVideoCodec: VideoCodec.VP9 } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });

@@ -1442,7 +1442,7 @@ describe(MediaService.name, () => {
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: 'invalid' as any } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });

@@ -1628,7 +1628,6 @@ describe(MediaService.name, () => {
     });

     it('should set options for qsv', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, maxBitrate: '10000k' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1664,7 +1663,6 @@ describe(MediaService.name, () => {
     });

     it('should set options for qsv with custom dri node', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: {
@@ -1690,7 +1688,6 @@ describe(MediaService.name, () => {
     });

     it('should omit preset for qsv if invalid', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, preset: 'invalid' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1710,7 +1707,6 @@ describe(MediaService.name, () => {
     });

     it('should set low power mode for qsv if target video codec is vp9', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV, targetVideoCodec: VideoCodec.VP9 } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1730,17 +1726,18 @@ describe(MediaService.name, () => {
     });

     it('should fail for qsv if no hw devices', async () => {
-      storageMock.readdir.mockRejectedValue(new Error('Could not read directory'));
+      sut.videoInterfaces = { dri: [], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
+
       expect(mediaMock.transcode).not.toHaveBeenCalled();
-      expect(loggerMock.debug).toHaveBeenCalledWith('No devices found in /dev/dri.');
     });

     it('should prefer higher index renderD* device for qsv', async () => {
-      storageMock.readdir.mockResolvedValue(['card1', 'renderD129', 'card0', 'renderD128']);
+      sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.QSV } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1760,7 +1757,6 @@ describe(MediaService.name, () => {
     });

     it('should use hardware decoding for qsv if enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
@@ -1790,7 +1786,6 @@ describe(MediaService.name, () => {
     });

     it('should use hardware tone-mapping for qsv if hardware decoding is enabled and should tone map', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
@@ -1820,7 +1815,7 @@ describe(MediaService.name, () => {
     });

     it('should use preferred device for qsv when hardware decoding', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128', 'renderD129', 'renderD130']);
+      sut.videoInterfaces = { dri: ['renderD128', 'renderD129', 'renderD130'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true, preferredHwDevice: 'renderD129' },
@@ -1840,7 +1835,6 @@ describe(MediaService.name, () => {
     });

     it('should set format to nv12 for qsv if input is not yuv420p', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStream10Bit);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.QSV, accelDecode: true },
@@ -1866,7 +1860,6 @@ describe(MediaService.name, () => {
     });

     it('should set options for vaapi', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1898,7 +1891,6 @@ describe(MediaService.name, () => {
     });

     it('should set vbr options for vaapi when max bitrate is enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, maxBitrate: '10000k' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1924,7 +1916,6 @@ describe(MediaService.name, () => {
     });

     it('should set cq options for vaapi when max bitrate is disabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1950,7 +1941,6 @@ describe(MediaService.name, () => {
     });

     it('should omit preset for vaapi if invalid', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, preset: 'invalid' } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1970,7 +1960,7 @@ describe(MediaService.name, () => {
     });

     it('should prefer higher index renderD* device for vaapi', async () => {
-      storageMock.readdir.mockResolvedValue(['card1', 'renderD129', 'card0', 'renderD128']);
+      sut.videoInterfaces = { dri: ['card1', 'renderD129', 'card0', 'renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -1990,7 +1980,7 @@ describe(MediaService.name, () => {
     });

     it('should select specific gpu node if selected', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD129', 'card1', 'card0', 'renderD128']);
+      sut.videoInterfaces = { dri: ['renderD129', 'card1', 'card0', 'renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, preferredHwDevice: '/dev/dri/renderD128' },
@@ -2012,7 +2002,6 @@ describe(MediaService.name, () => {
     });

     it('should use hardware decoding for vaapi if enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
@@ -2041,7 +2030,6 @@ describe(MediaService.name, () => {
     });

     it('should use hardware tone-mapping for vaapi if hardware decoding is enabled and should tone map', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
@@ -2066,7 +2054,6 @@ describe(MediaService.name, () => {
     });

     it('should set format to nv12 for vaapi if input is not yuv420p', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
       mediaMock.probe.mockResolvedValue(probeStub.videoStream10Bit);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true },
@@ -2087,7 +2074,7 @@ describe(MediaService.name, () => {
     });

     it('should use preferred device for vaapi when hardware decoding', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128', 'renderD129', 'renderD130']);
+      sut.videoInterfaces = { dri: ['renderD128', 'renderD129', 'renderD130'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true, preferredHwDevice: 'renderD129' },
@@ -2106,8 +2093,47 @@ describe(MediaService.name, () => {
       );
     });

-    it('should fallback to sw transcoding if hw transcoding fails', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
+    it('should fallback to hw encoding and sw decoding if hw transcoding fails and hw decoding is enabled', async () => {
+      mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
+      systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
+      assetMock.getByIds.mockResolvedValue([assetStub.video]);
+      mediaMock.transcode.mockRejectedValueOnce(new Error('error'));
+      await sut.handleVideoConversion({ id: assetStub.video.id });
+      expect(mediaMock.transcode).toHaveBeenCalledTimes(2);
+      expect(mediaMock.transcode).toHaveBeenLastCalledWith(
+        '/original/path.ext',
+        'upload/encoded-video/user-id/as/se/asset-id.mp4',
+        expect.objectContaining({
+          inputOptions: expect.arrayContaining([
+            '-init_hw_device vaapi=accel:/dev/dri/renderD128',
+            '-filter_hw_device accel',
+          ]),
+          outputOptions: expect.arrayContaining([`-c:v h264_vaapi`]),
+          twoPass: false,
+        }),
+      );
+    });
+
+    it('should fallback to sw decoding if fallback to sw decoding + hw encoding fails', async () => {
+      mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
+      systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI, accelDecode: true } });
+      assetMock.getByIds.mockResolvedValue([assetStub.video]);
+      mediaMock.transcode.mockRejectedValueOnce(new Error('error'));
+      mediaMock.transcode.mockRejectedValueOnce(new Error('error'));
+      await sut.handleVideoConversion({ id: assetStub.video.id });
+      expect(mediaMock.transcode).toHaveBeenCalledTimes(3);
+      expect(mediaMock.transcode).toHaveBeenLastCalledWith(
+        '/original/path.ext',
+        'upload/encoded-video/user-id/as/se/asset-id.mp4',
+        expect.objectContaining({
+          inputOptions: expect.any(Array),
+          outputOptions: expect.arrayContaining(['-c:v h264']),
+          twoPass: false,
+        }),
+      );
+    });
+
+    it('should fallback to sw transcoding if hw transcoding fails and hw decoding is disabled', async () => {
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -2126,17 +2152,15 @@ describe(MediaService.name, () => {
     });

     it('should fail for vaapi if no hw devices', async () => {
-      storageMock.readdir.mockResolvedValue([]);
+      sut.videoInterfaces = { dri: [], mali: true };
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.VAAPI } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
-      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).resolves.toBe(JobStatus.FAILED);
+      await expect(sut.handleVideoConversion({ id: assetStub.video.id })).rejects.toThrowError();
       expect(mediaMock.transcode).not.toHaveBeenCalled();
     });

     it('should set options for rkmpp', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({ ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true } });
       assetMock.getByIds.mockResolvedValue([assetStub.video]);
@@ -2171,8 +2195,6 @@ describe(MediaService.name, () => {
     });

     it('should set vbr options for rkmpp when max bitrate is enabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamVp9);
       systemMock.get.mockResolvedValue({
         ffmpeg: {
@@ -2196,8 +2218,6 @@ describe(MediaService.name, () => {
     });

     it('should set cqp options for rkmpp when max bitrate is disabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.matroskaContainer);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
@@ -2216,8 +2236,6 @@ describe(MediaService.name, () => {
     });

     it('should set OpenCL tonemapping options for rkmpp when OpenCL is available', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
@@ -2240,8 +2258,7 @@ describe(MediaService.name, () => {
     });

     it('should set hardware decoding options for rkmpp when hardware decoding is enabled with no OpenCL on non-HDR file', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => false, isCharacterDevice: () => false } as Stats);
+      sut.videoInterfaces = { dri: ['renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.noAudioStreams);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },
@@ -2262,8 +2279,6 @@ describe(MediaService.name, () => {
     });

     it('should use software decoding and tone-mapping if hardware decoding is disabled', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => true, isCharacterDevice: () => true } as Stats);
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: false, crf: 30, maxBitrate: '0' },
@@ -2286,8 +2301,7 @@ describe(MediaService.name, () => {
     });

     it('should use software tone-mapping if opencl is not available', async () => {
-      storageMock.readdir.mockResolvedValue(['renderD128']);
-      storageMock.stat.mockResolvedValue({ isFile: () => false, isCharacterDevice: () => false } as Stats);
+      sut.videoInterfaces = { dri: ['renderD128'], mali: false };
       mediaMock.probe.mockResolvedValue(probeStub.videoStreamHDR);
       systemMock.get.mockResolvedValue({
         ffmpeg: { accel: TranscodeHWAccel.RKMPP, accelDecode: true, crf: 30, maxBitrate: '0' },

@@ -1,7 +1,7 @@
 import { Injectable } from '@nestjs/common';
 import { dirname } from 'node:path';
 import { StorageCore } from 'src/cores/storage.core';
-import { OnJob } from 'src/decorators';
+import { OnEvent, OnJob } from 'src/decorators';
 import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
 import { AssetEntity } from 'src/entities/asset.entity';
 import {
@@ -27,7 +27,7 @@ import {
   JobStatus,
   QueueName,
 } from 'src/interfaces/job.interface';
-import { AudioStreamInfo, TranscodeCommand, VideoFormat, VideoStreamInfo } from 'src/interfaces/media.interface';
+import { AudioStreamInfo, VideoFormat, VideoInterfaces, VideoStreamInfo } from 'src/interfaces/media.interface';
 import { BaseService } from 'src/services/base.service';
 import { getAssetFiles } from 'src/utils/asset.util';
 import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
@@ -36,8 +36,13 @@ import { usePagination } from 'src/utils/pagination';

 @Injectable()
 export class MediaService extends BaseService {
-  private maliOpenCL?: boolean;
-  private devices?: string[];
+  videoInterfaces: VideoInterfaces = { dri: [], mali: false };
+
+  @OnEvent({ name: 'app.bootstrap' })
+  async onBootstrap() {
+    const [dri, mali] = await Promise.all([this.getDevices(), this.hasMaliOpenCL()]);
+    this.videoInterfaces = { dri, mali };
+  }

   @OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
   async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
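Because the probed interfaces now live on a plain public field populated at app.bootstrap, instead of being lazily cached inside getDevices() and hasMaliOpenCL(), the specs above no longer stub storageMock.readdir or storageMock.stat and simply assign the field, for example (taken directly from the spec diff):

// in the spec file changed above
sut.videoInterfaces = { dri: ['renderD128'], mali: true };
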
@@ -300,19 +305,19 @@ export class MediaService extends BaseService {
     const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
       countFrames: this.logger.isLevelEnabled(LogLevel.DEBUG), // makes frame count more reliable for progress logs
     });
-    const mainVideoStream = this.getMainStream(videoStreams);
-    const mainAudioStream = this.getMainStream(audioStreams);
-    if (!mainVideoStream || !format.formatName) {
+    const videoStream = this.getMainStream(videoStreams);
+    const audioStream = this.getMainStream(audioStreams);
+    if (!videoStream || !format.formatName) {
       return JobStatus.FAILED;
     }

-    if (!mainVideoStream.height || !mainVideoStream.width) {
+    if (!videoStream.height || !videoStream.width) {
       this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
       return JobStatus.FAILED;
     }

-    const { ffmpeg } = await this.getConfig({ withCache: true });
-    const target = this.getTranscodeTarget(ffmpeg, mainVideoStream, mainAudioStream);
+    let { ffmpeg } = await this.getConfig({ withCache: true });
+    const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
     if (target === TranscodeTarget.NONE && !this.isRemuxRequired(ffmpeg, format)) {
       if (asset.encodedVideoPath) {
         this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
@@ -325,15 +330,7 @@ export class MediaService extends BaseService {
       return JobStatus.SKIPPED;
     }

-    let command: TranscodeCommand;
-    try {
-      const config = BaseConfig.create(ffmpeg, await this.getDevices(), await this.hasMaliOpenCL());
-      command = config.getCommand(target, mainVideoStream, mainAudioStream);
-    } catch (error) {
-      this.logger.error(`An error occurred while configuring transcoding options: ${error}`);
-      return JobStatus.FAILED;
-    }
-
+    const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
     if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
       this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
     } else {
@@ -354,8 +351,8 @@ export class MediaService extends BaseService {
       if (ffmpeg.accelDecode) {
         try {
           this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and software decoding`);
-          const config = BaseConfig.create({ ...ffmpeg, accelDecode: false });
-          command = config.getCommand(target, mainVideoStream, mainAudioStream);
+          ffmpeg = { ...ffmpeg, accelDecode: false };
+          const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
           await this.mediaRepository.transcode(input, output, command);
           partialFallbackSuccess = true;
         } catch (error: any) {
@@ -365,8 +362,8 @@ export class MediaService extends BaseService {

       if (!partialFallbackSuccess) {
         this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
-        const config = BaseConfig.create({ ...ffmpeg, accel: TranscodeHWAccel.DISABLED });
-        command = config.getCommand(target, mainVideoStream, mainAudioStream);
+        ffmpeg = { ...ffmpeg, accel: TranscodeHWAccel.DISABLED };
+        const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
         await this.mediaRepository.transcode(input, output, command);
       }
     }
@@ -507,30 +504,24 @@ export class MediaService extends BaseService {
   }

   private async getDevices() {
-    if (!this.devices) {
-      try {
-        this.devices = await this.storageRepository.readdir('/dev/dri');
-      } catch {
-        this.logger.debug('No devices found in /dev/dri.');
-        this.devices = [];
-      }
-    }
-
-    return this.devices;
+    try {
+      return await this.storageRepository.readdir('/dev/dri');
+    } catch {
+      this.logger.debug('No devices found in /dev/dri.');
+      return [];
+    }
   }

   private async hasMaliOpenCL() {
-    if (this.maliOpenCL === undefined) {
-      try {
-        const maliIcdStat = await this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd');
-        const maliDeviceStat = await this.storageRepository.stat('/dev/mali0');
-        this.maliOpenCL = maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
-      } catch {
-        this.logger.debug('OpenCL not available for transcoding, so RKMPP acceleration will use CPU tonemapping');
-        this.maliOpenCL = false;
-      }
-    }
-
-    return this.maliOpenCL;
+    try {
+      const [maliIcdStat, maliDeviceStat] = await Promise.all([
+        this.storageRepository.stat('/etc/OpenCL/vendors/mali.icd'),
+        this.storageRepository.stat('/dev/mali0'),
+      ]);
+      return maliIcdStat.isFile() && maliDeviceStat.isCharacterDevice();
+    } catch {
+      this.logger.debug('OpenCL not available for transcoding, so RKMPP acceleration will use CPU tonemapping');
+      return false;
+    }
   }
 }

@@ -7,6 +7,7 @@ import {
   VideoCodecHWConfig,
   VideoCodecSWConfig,
   VideoFormat,
+  VideoInterfaces,
   VideoStreamInfo,
 } from 'src/interfaces/media.interface';
@@ -14,11 +15,11 @@ export class BaseConfig implements VideoCodecSWConfig {
   readonly presets = ['veryslow', 'slower', 'slow', 'medium', 'fast', 'faster', 'veryfast', 'superfast', 'ultrafast'];
   protected constructor(protected config: SystemConfigFFmpegDto) {}

-  static create(config: SystemConfigFFmpegDto, devices: string[] = [], hasMaliOpenCL = false): VideoCodecSWConfig {
+  static create(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces): VideoCodecSWConfig {
     if (config.accel === TranscodeHWAccel.DISABLED) {
       return this.getSWCodecConfig(config);
     }
-    return this.getHWCodecConfig(config, devices, hasMaliOpenCL);
+    return this.getHWCodecConfig(config, interfaces);
   }

   private static getSWCodecConfig(config: SystemConfigFFmpegDto) {
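With this signature change, callers pass the bootstrap-time interface snapshot instead of a device list and a separate Mali flag. The service code earlier in this diff calls the factory as shown below; the literal interface values here are only an example:

// example usage of the new factory shape
const command = BaseConfig.create(ffmpeg, { dri: ['renderD128'], mali: false }).getCommand(
  target,
  videoStream,
  audioStream,
);
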
@@ -41,27 +42,31 @@ export class BaseConfig implements VideoCodecSWConfig {
     }
   }

-  private static getHWCodecConfig(config: SystemConfigFFmpegDto, devices: string[] = [], hasMaliOpenCL = false) {
+  private static getHWCodecConfig(config: SystemConfigFFmpegDto, interfaces: VideoInterfaces) {
     let handler: VideoCodecHWConfig;
     switch (config.accel) {
       case TranscodeHWAccel.NVENC: {
-        handler = config.accelDecode ? new NvencHwDecodeConfig(config) : new NvencSwDecodeConfig(config);
+        handler = config.accelDecode
+          ? new NvencHwDecodeConfig(config, interfaces)
+          : new NvencSwDecodeConfig(config, interfaces);
         break;
       }
       case TranscodeHWAccel.QSV: {
-        handler = config.accelDecode ? new QsvHwDecodeConfig(config, devices) : new QsvSwDecodeConfig(config, devices);
+        handler = config.accelDecode
+          ? new QsvHwDecodeConfig(config, interfaces)
+          : new QsvSwDecodeConfig(config, interfaces);
         break;
       }
       case TranscodeHWAccel.VAAPI: {
         handler = config.accelDecode
-          ? new VaapiHwDecodeConfig(config, devices)
-          : new VaapiSwDecodeConfig(config, devices);
+          ? new VaapiHwDecodeConfig(config, interfaces)
+          : new VaapiSwDecodeConfig(config, interfaces);
         break;
       }
       case TranscodeHWAccel.RKMPP: {
         handler = config.accelDecode
-          ? new RkmppHwDecodeConfig(config, devices, hasMaliOpenCL)
-          : new RkmppSwDecodeConfig(config, devices);
+          ? new RkmppHwDecodeConfig(config, interfaces)
+          : new RkmppSwDecodeConfig(config, interfaces);
         break;
       }
       default: {
@@ -323,13 +328,15 @@ export class BaseConfig implements VideoCodecSWConfig {

 export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
   protected device: string;
+  protected interfaces: VideoInterfaces;

   constructor(
     protected config: SystemConfigFFmpegDto,
-    devices: string[] = [],
+    interfaces: VideoInterfaces,
   ) {
     super(config);
-    this.device = this.getDevice(devices);
+    this.interfaces = interfaces;
+    this.device = this.getDevice(interfaces);
   }

   getSupportedCodecs() {
@@ -346,16 +353,16 @@ export class BaseHWConfig extends BaseConfig implements VideoCodecHWConfig {
     });
   }

-  getDevice(devices: string[]) {
+  getDevice({ dri }: VideoInterfaces) {
     if (this.config.preferredHwDevice === 'auto') {
       // eslint-disable-next-line unicorn/no-array-reduce
-      return `/dev/dri/${this.validateDevices(devices).reduce(function (a, b) {
+      return `/dev/dri/${this.validateDevices(dri).reduce(function (a, b) {
         return a.localeCompare(b) < 0 ? b : a;
       })}`;
     }

     const deviceName = this.config.preferredHwDevice.replace('/dev/dri/', '');
-    if (!devices.includes(deviceName)) {
+    if (!dri.includes(deviceName)) {
       throw new Error(`Device '${deviceName}' does not exist. If using Docker, make sure this device is mounted`);
     }

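For reference, the selection behaviour this implies, with example inputs drawn from the specs above (the return value for an explicit preferredHwDevice is inferred from the test 'should select specific gpu node if selected' rather than shown in this hunk):

// preferredHwDevice: 'auto', dri: ['card1', 'renderD129', 'card0', 'renderD128']
//   -> '/dev/dri/renderD129' (the highest renderD* node wins the reduce)
// preferredHwDevice: '/dev/dri/renderD128', dri: ['renderD129', 'card1', 'card0', 'renderD128']
//   -> that node is used, since 'renderD128' is present in dri
// preferredHwDevice: 'renderD130', dri: ['renderD128']
//   -> throws: Device 'renderD130' does not exist. If using Docker, make sure this device is mounted
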
|
@ -886,13 +893,6 @@ export class VaapiHwDecodeConfig extends VaapiSwDecodeConfig {
|
||||||
}
|
}
|
||||||
|
|
||||||
export class RkmppSwDecodeConfig extends BaseHWConfig {
|
export class RkmppSwDecodeConfig extends BaseHWConfig {
|
||||||
constructor(
|
|
||||||
protected config: SystemConfigFFmpegDto,
|
|
||||||
devices: string[] = [],
|
|
||||||
) {
|
|
||||||
super(config, devices);
|
|
||||||
}
|
|
||||||
|
|
||||||
eligibleForTwoPass(): boolean {
|
eligibleForTwoPass(): boolean {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@@ -937,16 +937,6 @@ export class RkmppSwDecodeConfig extends BaseHWConfig {
 }

 export class RkmppHwDecodeConfig extends RkmppSwDecodeConfig {
-  protected hasMaliOpenCL: boolean;
-  constructor(
-    protected config: SystemConfigFFmpegDto,
-    devices: string[] = [],
-    hasMaliOpenCL = false,
-  ) {
-    super(config, devices);
-    this.hasMaliOpenCL = hasMaliOpenCL;
-  }
-
   getBaseInputOptions() {
     return ['-hwaccel rkmpp', '-hwaccel_output_format drm_prime', '-afbc rga', '-noautorotate'];
   }
@@ -954,7 +944,7 @@ export class RkmppHwDecodeConfig extends RkmppSwDecodeConfig {
   getFilterOptions(videoStream: VideoStreamInfo) {
     if (this.shouldToneMap(videoStream)) {
       const { primaries, transfer, matrix } = this.getColors();
-      if (this.hasMaliOpenCL) {
+      if (this.interfaces.mali) {
         return [
           // use RKMPP for scaling, OpenCL for tone mapping
           `scale_rkrga=${this.getScaling(videoStream)}:format=p010:afbc=1:async_depth=4`,