Made the service basically start working
@@ -4,6 +4,14 @@ WORKDIR /app
EXPOSE 8080
EXPOSE 8081

FROM debian:stable-slim AS ffmpeg
RUN apt-get update && apt-get install -y unzip
WORKDIR /ffmpeg
ADD "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffmpeg-6.1-linux-64.zip" ./ffmpeg.zip
ADD "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffprobe-6.1-linux-64.zip" ./ffprobe.zip
RUN unzip ./ffmpeg.zip -d ./bin/
RUN unzip ./ffprobe.zip -d ./bin/

FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
@@ -20,4 +28,5 @@ RUN dotnet publish "./Encoder.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
COPY --from=ffmpeg /ffmpeg/bin/ /app/ffmpeg/
ENTRYPOINT ["dotnet", "Encoder.dll"]

@@ -1,6 +1,42 @@
@Encoder_HostAddress = http://localhost:5257

GET {{Encoder_HostAddress}}/weatherforecast/
GET {{Encoder_HostAddress}}/status
Accept: application/json
###

POST {{Encoder_HostAddress}}/encode
Content-Type: multipart/form-data; boundary=WebAppBoundary

--WebAppBoundary
Content-Disposition: form-data; name="video"; filename="testVid8k.mp4"

< ../EncodingSampleTest/testVid8k.mp4
--WebAppBoundary--

###

POST {{Encoder_HostAddress}}/encode
Content-Type: multipart/form-data; boundary=WebAppBoundary

--WebAppBoundary
Content-Disposition: form-data; name="video"; filename="testVid6k.mp4"

< ../EncodingSampleTest/testVid6K.mp4
--WebAppBoundary--

###

POST {{Encoder_HostAddress}}/encode
Content-Type: multipart/form-data; boundary=WebAppBoundary

--WebAppBoundary
Content-Disposition: form-data; name="video"; filename="testVid.mp4"

< ../EncodingSampleTest/testVid.mp4
--WebAppBoundary--

###

GET {{Encoder_HostAddress}}/file/e897d62f-1134-4017-b8a2-c3d1c4994bc6
Accept: video/mp4
###

@@ -1,9 +1,51 @@
using System.Numerics;
using FFMpegCore;
using FFMpegCore.Enums;
using Microsoft.Extensions.Options;

namespace Encoder;

public class EncoderService : IEncoderService {
public class EncoderService : BackgroundService, IEncoderService {
    Queue<EncodingJob> JobQueue;
    public EncoderService(EncoderServiceOptions options) {
        JobQueue = new Queue<EncodingJob>();
    List<EncodingJob> Jobs = new();
    ILogger<EncoderService> Logger;
    FFmpegOptions options;

    public EncoderService(ILogger<EncoderService> logger, IOptions<FFmpegOptions> ffmpegOptions) {
        Logger = logger;
        options = ffmpegOptions.Value;
        options.FfmpegPath = Path.GetFullPath(Path.Combine(Path.GetDirectoryName(Environment.ProcessPath), options.FfmpegPath));
        options.TemporaryFilesPath = Path.GetFullPath(Path.Combine(Path.GetDirectoryName(Environment.ProcessPath), options.TemporaryFilesPath));
        Directory.CreateDirectory(options.TemporaryFilesPath); // Ensure the temporary files directory exists
        JobQueue = new();

        logger.Log(LogLevel.Information,
            $"""
            Starting Encoder Service with options:
            TemporaryFilesPath: {options.TemporaryFilesPath}
            FfmpegPath: {options.FfmpegPath}
""");
|
||||
|
||||
GlobalFFOptions.Configure(ffOptions => {
|
||||
ffOptions.BinaryFolder = options.FfmpegPath;
|
||||
ffOptions.TemporaryFilesFolder = options.TemporaryFilesPath;
|
||||
});
|
||||
}
|
||||
|
||||
protected override Task ExecuteAsync(CancellationToken stoppingToken) => ProcessJobs(stoppingToken);
|
||||
|
||||
Task ProcessJobs(CancellationToken stoppingToken) {
|
||||
while (!stoppingToken.IsCancellationRequested) {
|
||||
if (JobQueue.Count > 0) {
|
||||
// Grab a reference to the next job in queue
|
||||
var job = JobQueue.Peek();
|
||||
ProcessJob(job); // Process the job
|
||||
JobQueue.Dequeue(); // Remove it from the queue
|
||||
Jobs.Add(job); // Add it to the completed jobs list
|
||||
}
|
||||
Thread.Sleep(5); // Prevent tight loop
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Guid EnqueueJob(EncodingJob job) {
|
||||
@@ -12,28 +54,55 @@ public class EncoderService : IEncoderService {
|
||||
}
|
||||
|
||||
public EncodingJob? GetJobStatus(Guid jobId) {
|
||||
return JobQueue.FirstOrDefault(j => j.Id == jobId);
|
||||
return Jobs.FirstOrDefault(j => j.Id == jobId);
|
||||
}
|
||||
|
||||
public void RemoveJob(Guid jobId) {
|
||||
var job = JobQueue.FirstOrDefault(j => j.Id == jobId);
|
||||
if (job != null) {
|
||||
var tempQueue = new Queue<EncodingJob>();
|
||||
while (JobQueue.Count > 0) {
|
||||
var currentJob = JobQueue.Dequeue();
|
||||
if (currentJob.Id != jobId) {
|
||||
tempQueue.Enqueue(currentJob);
|
||||
}
|
||||
}
|
||||
JobQueue = tempQueue;
|
||||
public IEnumerable<EncodingJob> GetJobs() {
|
||||
return JobQueue.Concat(Jobs);
|
||||
}
|
||||
|
||||
void ProcessJob(EncodingJob job) {
|
||||
job.Status = JobStatus.InProgress;
|
||||
var file = job.OrigFilePath;
|
||||
var mediaInfo = FFProbe.Analyse(file);
|
||||
if (mediaInfo.PrimaryVideoStream == null) {
|
||||
job.Status = JobStatus.Failed;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
public Task ProcessNextJob() {
|
||||
var job = JobQueue.Dequeue();
|
||||
// Encode....
|
||||
|
||||
|
||||
return Task.CompletedTask;
|
||||
var W = mediaInfo.PrimaryVideoStream.Width;
|
||||
var H = mediaInfo.PrimaryVideoStream.Height;
|
||||
string outputPath = Path.Combine(options.TemporaryFilesPath, Path.GetFileName(job.OrigFilePath));
|
||||
int qp = Utils.ToQPValue(W, H);
|
||||
var status = FFMpegArguments.FromFileInput(file, true, args => args.WithHardwareAcceleration())
|
||||
.OutputToFile(outputPath, true, args => args
|
||||
.CopyChannel(Channel.Audio)
|
||||
.CopyChannel(Channel.Subtitle)
|
||||
.WithVideoCodec("av1_nvenc")
|
||||
.WithArgument(new NvencSpeedPreset(NvencSpeed.p2))
|
||||
.WithArgument(new NvencTuneArgument(NvencTune.hq))
|
||||
.WithArgument(new NvencHighBitDepthArgument(true))
|
||||
.WithArgument(new NvencQPArgument((byte)qp))
|
||||
.WithFastStart()
|
||||
)
|
||||
.NotifyOnProgress(progress => {
|
||||
Logger.Log(LogLevel.Information,
|
||||
$"""
|
||||
Job {job.Id}: {progress / mediaInfo.Duration:P}
|
||||
Processed {progress:g} | Total {mediaInfo.Duration:g}
|
||||
Using AV1 NVENC with QP={qp} for {W}x{H}@{mediaInfo.PrimaryVideoStream.FrameRate}.
|
||||
In path: {file}
|
||||
Output path: {outputPath}
|
||||
""");
|
||||
job.Progress = (float)(progress / mediaInfo.Duration);
|
||||
})
|
||||
.ProcessSynchronously();
|
||||
if(status) {
|
||||
job.Status = JobStatus.Completed;
|
||||
job.EncodedFilePath = outputPath;
|
||||
} else {
|
||||
job.Status = JobStatus.Failed;
|
||||
}
|
||||
job.CompletedAt = DateTime.Now;
|
||||
job.Progress = 1.0f;
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
namespace Encoder;

public record class EncoderServiceOptions {
    public string OutputPath { get; init; } = Path.GetTempPath();
    public string FfmpegPath { get; init; } = Path.Combine(Environment.ProcessPath, "ffmpeg");
}

@@ -1,9 +1,9 @@
namespace Encoder;

public record EncodingJob(Guid Id) {
public record EncodingJob(Guid Id, string OrigFilePath) {
    public JobStatus Status { get; set; } = JobStatus.Pending;
    public DateTime CreatedAt { get; init; } = DateTime.Now;
    public DateTime? CompletedAt { get; set; } = null;
    public string OrigFilePath { get; init; } = string.Empty;
    public string EncodedFilePath { get; set; } = string.Empty;
    public float Progress { get; set; } = 0.0f;
}

Encoder/FfmpegOptions.cs (new file)
@@ -0,0 +1,7 @@
namespace Encoder;

public class FFmpegOptions {
    public const string SectionName = "FFmpeg";
    public string TemporaryFilesPath { get; set; } = Path.GetTempPath();
    public string FfmpegPath { get; set; } = Path.Combine(Environment.ProcessPath!, "ffmpeg");
}

@@ -3,5 +3,4 @@ namespace Encoder;
public interface IEncoderService {
    public Guid EnqueueJob(EncodingJob job);
    public EncodingJob? GetJobStatus(Guid jobId);
    public void RemoveJob(Guid jobId);
}

Encoder/NvencExt.cs (new file)
@@ -0,0 +1,41 @@
using System.ComponentModel.DataAnnotations;

namespace Encoder;

public enum NvencSpeed {
    Default=0,
    Slow=1,
    Medium=2,
    Fast=3,
    p1=12,
    p2=13,
    p3=14,
    p4=15,
    p5=16,
    p6=17,
    p7=18,
}

public enum NvencTune {
    hq=1,
    uhq=5,
    ll=2,
    ull=3,
    lossless=4,
}

class NvencSpeedPreset(NvencSpeed speed) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return $"-preset {speed.ToString().ToLower()}"; } }
}

class NvencTuneArgument(NvencTune tune) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return $"-tune {tune.ToString().ToLower()}"; } }
}

class NvencHighBitDepthArgument(bool enable) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return enable ? "-highbitdepth true" : string.Empty; } }
}

class NvencQPArgument([Range(-1, 255)]byte qp) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return $"-qp {qp}"; } }
}

Encoder/OpenApi.http (new file)
@@ -0,0 +1,5 @@
### GET request to example server
GET https://examples.http-client.intellij.net/get
    ?generated-in=JetBrains Rider

###

@@ -1,15 +1,21 @@
using Encoder;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.AspNetCore.Mvc;

var builder = WebApplication.CreateBuilder(args);

//Settings
string? tmpFilePath = builder.Configuration.GetValue<string>("TempFilePath");
string? ffmpegPath = builder.Configuration.GetValue<string>("FfmpegPath");
//Services
builder.Services.AddOpenApi();
builder.Services.AddLogging();

string uploadsPath = builder.Configuration.GetSection("UploadsPath").Get<string>() ?? "./Uploads";
if(!Path.IsPathRooted(uploadsPath)) uploadsPath = Path.GetFullPath(Path.Combine(Path.GetDirectoryName(Environment.ProcessPath), uploadsPath));
Directory.CreateDirectory(uploadsPath); // Ensure the uploads directory exists

builder.Services.Configure<FFmpegOptions>(builder.Configuration.GetSection(FFmpegOptions.SectionName));
builder.Services.AddSingleton<EncoderService>();
builder.Services.AddHostedService<EncoderService>(p => p.GetRequiredService<EncoderService>());

var encoderOptions = new EncoderServiceOptions { OutputPath = tmpFilePath };
builder.Services.AddSingleton<IEncoderService, EncoderService>(_ => new (encoderOptions));

var app = builder.Build();

@@ -22,6 +28,9 @@ app.UseHttpsRedirection();
// Returns the ID of the job handling the encoding
app.MapPost("encode", context =>
{
    // Disable request size limit
    context.Features.Get<IHttpMaxRequestBodySizeFeature>()?.MaxRequestBodySize = null;

    var request = context.Request;
    if (!request.HasFormContentType) {
        context.Response.StatusCode = 400;
@@ -29,14 +38,20 @@ app.MapPost("encode", context =>
    }

    var form = request.Form;
    var file = form.Files.GetFile("video");
    var file = form.Files.GetFile("video"); // Contrary to what it seems, the "name" here is the form field name, not the file name

    if (file == null) {
        context.Response.StatusCode = 400;
        return context.Response.WriteAsync("No video file provided.");
    }

    var job = new EncodingJob(Guid.NewGuid());
    // Save the file to a temporary location
    var jobGuid = Guid.NewGuid();
    var tempFilePath = Path.GetFullPath(Path.Combine(uploadsPath, jobGuid.ToString("D")+Path.GetExtension(file.FileName)));
    using (var stream = File.Create(tempFilePath))
        file.CopyTo(stream);

    var job = new EncodingJob(jobGuid, tempFilePath);
    var encSrv = context.RequestServices.GetService<EncoderService>();
    if (encSrv != null) encSrv.EnqueueJob(job);
    else {
@@ -45,7 +60,20 @@ app.MapPost("encode", context =>
    }

    context.Response.StatusCode = 200;
    return context.Response.WriteAsJsonAsync(new { JobId = job.Id });
    return context.Response.WriteAsJsonAsync(new { JobId = jobGuid });
}).WithFormOptions(multipartBodyLengthLimit: 1024L*1024L*1024L*64L);

app.MapGet("status", (context) => {
    var encSrv = context.RequestServices.GetService<EncoderService>();
    if (encSrv == null) {
        context.Response.StatusCode = 500;
        return context.Response.WriteAsync("Encoder service not available.");
    }

    var jobs = encSrv.GetJobs().ToArray();

    context.Response.StatusCode = 200;
    return context.Response.WriteAsJsonAsync(jobs);
});

// Check the status of an encoding job by its ID
@@ -84,4 +112,6 @@ app.MapGet("file/{jobId:guid}", (HttpContext context, Guid jobId) =>
    return Results.File(fileBytes, "video/mp4", Path.GetFileName(filePath), enableRangeProcessing:true);
});

app.MapOpenApi("openapi.json");

app.Run();

Encoder/Utils.cs (new file)
@@ -0,0 +1,38 @@
namespace Encoder;

public static class Utils {
    static Tuple<int, int>[] QPTable = new Tuple<int, int>[] {
        new(1280*720, 64),
        new(1920*1080, 96),
        new(3840*2160, 128),
        new(5760*2880, 96), //VR6K
        new(8128*4096, 120) //VR8K
    }.OrderBy(t => t.Item1).ToArray();

    static float lerp(float v0, float v1, float t) {
        return v0 + t * (v1 - v0);
    }

    static float remap(float value, float from1, float to1, float from2, float to2) {
        return from2 + (value - from1) * (to2 - from2) / (to1 - from1);
    }

    public static int ToQPValue(int W, int H) {
        int pixels = W * H;
        for (var i = 0; i < QPTable.Length; i++) {
            var t = QPTable[i];
            if (pixels <= t.Item1) {
                var minQP = QPTable[i - 1].Item2;
                var maxQP = QPTable[i].Item2;

                var minPixels = QPTable[i - 1].Item1;
                var maxPixels = QPTable[i].Item1;
                var tPixels = remap(pixels, minPixels, maxPixels, 0, 1);

                return (int)lerp(minQP, maxQP, tPixels);
            }
        }
        // Return the highest QP for anything higher than 8K VR
        return QPTable.Last().Item2;
    }
}

@@ -6,6 +6,9 @@
    }
  },
  "AllowedHosts": "*",
  "TemporaryFilesPath": "./Temp",
  "FfmpegPath": "./ffmpeg"
  "FFmpeg": {
    "TemporaryFilesPath": "./Temp",
    "FfmpegPath": "./ffmpeg"
  },
  "UploadsPath": "./Uploads"
}

EncodingSampleTest/EncodingSampleTest.csproj (new file)
@@ -0,0 +1,32 @@
<Project Sdk="Microsoft.NET.Sdk">

    <PropertyGroup>
        <OutputType>Exe</OutputType>
        <TargetFramework>net10.0</TargetFramework>
        <ImplicitUsings>enable</ImplicitUsings>
        <Nullable>enable</Nullable>
    </PropertyGroup>

    <ItemGroup>
        <PackageReference Include="FFMpegCore" Version="5.4.0" />
    </ItemGroup>

    <ItemGroup>
        <None Update="testVid.mp4">
            <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
        </None>
        <None Update="vendor\ffmpeg.exe">
            <CopyToOutputDirectory>Always</CopyToOutputDirectory>
        </None>
        <None Update="vendor\ffprobe.exe">
            <CopyToOutputDirectory>Always</CopyToOutputDirectory>
        </None>
        <None Update="testVid6K.mp4">
            <CopyToOutputDirectory>Always</CopyToOutputDirectory>
        </None>
        <None Update="testVid8k.mp4">
            <CopyToOutputDirectory>Always</CopyToOutputDirectory>
        </None>
    </ItemGroup>

</Project>

EncodingSampleTest/EncodingSampleTest.csproj.DotSettings (new file)
@@ -0,0 +1,2 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
    <s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=vendor/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

EncodingSampleTest/NvencExt.cs (new file)
@@ -0,0 +1,41 @@
using System.ComponentModel.DataAnnotations;

namespace EncodingSampleTest;

public enum NvencSpeed {
    Default=0,
    Slow=1,
    Medium=2,
    Fast=3,
    p1=12,
    p2=13,
    p3=14,
    p4=15,
    p5=16,
    p6=17,
    p7=18,
}

public enum NvencTune {
    hq=1,
    uhq=5,
    ll=2,
    ull=3,
    lossless=4,
}

class NvencSpeedPreset(NvencSpeed speed) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return $"-preset {speed.ToString().ToLower()}"; } }
}

class NvencTuneArgument(NvencTune tune) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return $"-tune {tune.ToString().ToLower()}"; } }
}

class NvencHighBitDepthArgument(bool enable) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return enable ? "-highbitdepth true" : string.Empty; } }
}

class NvencQPArgument([Range(-1, 255)]byte qp) : FFMpegCore.Arguments.IArgument {
    public string Text { get { return $"-qp {qp}"; } }
}

EncodingSampleTest/Program.cs (new file)
@@ -0,0 +1,65 @@
using System;
using EncodingSampleTest;
using FFMpegCore;
using FFMpegCore.Enums;

string ffmpegPath = Path.Combine(Environment.CurrentDirectory, "vendor");
string tempPath = Path.Combine(Environment.CurrentDirectory, "tmp");
string outputDir = Path.Combine(Environment.CurrentDirectory, "output");
Directory.CreateDirectory(outputDir);
Directory.CreateDirectory(tempPath);
const string inputFile = "testVid8k.mp4";

GlobalFFOptions.Configure(options => {
    options.BinaryFolder = ffmpegPath;
    options.TemporaryFilesFolder = tempPath;
});

if(!File.Exists(inputFile)) return;

var mediaInfo = FFProbe.Analyse(inputFile);
if (mediaInfo.PrimaryVideoStream == null) {
    Console.WriteLine("No video stream found.");
    return;
}
var W = mediaInfo.PrimaryVideoStream.Width;
var H = mediaInfo.PrimaryVideoStream.Height;

for (int qp = 32; qp <= 160; qp += 32) {
    await AV1Encode(W, H, qp);
}

/*
for (int qp = 32; qp <= 250; qp += 32) {
    await AV1Encode(1920, 1080, qp);
}*/


Task AV1Encode(int W = -1, int H = -1, int QP = 23) { // AV1 is visually lossless at QP 23
    var outputFile = Path.Combine(outputDir, $"output_av1-{W}x{H}_qp{QP}.mp4");
    var ffmpegArgs = FFMpegArguments
        .FromFileInput(inputFile, true, options => options
            .WithHardwareAcceleration()
        )
        .OutputToFile(outputFile, true, options => options
            .CopyChannel(Channel.Audio)
            .CopyChannel(Channel.Subtitle)
            .WithVideoCodec("hevc_nvenc")
            .WithArgument(new NvencSpeedPreset(NvencSpeed.p4))
            .WithArgument(new NvencTuneArgument(NvencTune.hq))
            .WithArgument(new NvencHighBitDepthArgument(true))
            .WithArgument(new NvencQPArgument((byte)QP))
            .WithVideoFilters(filterOptions => {
                if (W > 0 && H > 0) filterOptions.Scale(W, H);
            })
            .WithFastStart()
        )
        .WithLogLevel(FFMpegLogLevel.Info)
        .NotifyOnProgress(progress => Console.WriteLine($"Encoding {outputFile}: {progress:g}/{mediaInfo.Duration:g} {progress/mediaInfo.Duration:P}"));

    Console.WriteLine(ffmpegArgs.Arguments);

    var res = ffmpegArgs.ProcessSynchronously();
    return Task.CompletedTask;
}

@@ -3,4 +3,5 @@
    <File Path="compose.yaml" />
  </Folder>
  <Project Path="Encoder/Encoder.csproj" />
  <Project Path="EncodingSampleTest/EncodingSampleTest.csproj" />
</Solution>

@@ -4,4 +4,10 @@
    build:
      context: .
      dockerfile: Encoder/Dockerfile

    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]