Manager, refactor access to JS script files
Refactor the JS script file loading code so that it depends on the `fs.FS` interface for as long as possible, and less on the specifics of our `embed.FS` instance. This makes it possible to use other filesystems, like a real on-disk one, to load scripts.
parent 2d05e1c773
commit 7a89c07fc9
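As a rough illustration of what this decoupling buys (not part of the diff below; the `loadFrom` helper and its package name are hypothetical), a loader that only depends on `fs.FS` can be pointed at the embedded scripts via `fs.Sub`, or at a plain directory on disk via `os.DirFS`:

package job_compilers_example

import (
	"embed"
	"fmt"
	"io/fs"
	"os"
	"strings"
)

// Assumes a "scripts" directory exists next to this file, as in the Manager sources.
//
//go:embed scripts
var embeddedScripts embed.FS

// loadFrom is a hypothetical loader that depends only on fs.FS, so the same
// code path serves embedded scripts and on-disk scripts alike.
func loadFrom(filesystem fs.FS) ([]string, error) {
	entries, err := fs.ReadDir(filesystem, ".")
	if err != nil {
		return nil, fmt.Errorf("reading script directory: %w", err)
	}
	var names []string
	for _, entry := range entries {
		if strings.HasSuffix(entry.Name(), ".js") {
			names = append(names, entry.Name())
		}
	}
	return names, nil
}

func demo() {
	// Production: scripts compiled into the executable.
	embedded, _ := fs.Sub(embeddedScripts, "scripts")
	fmt.Println(loadFrom(embedded))

	// Tests or development: the same loader, fed a real on-disk directory.
	fmt.Println(loadFrom(os.DirFS("scripts-for-unittest")))
}

Programming against `fs.FS` keeps the `embed.FS` dependency at the edges, which is what makes the new on-disk unit tests further down possible.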
@@ -59,7 +59,7 @@ func Load(ts TimeService) (*Service, error) {
 	}

 	staticFileLoader := func(path string) ([]byte, error) {
-		content, err := compiler.loadScriptBytes(path)
+		content, err := compiler.loadScriptBytes(scriptsFS, path)
 		if err != nil {
 			// The 'require' module uses this to try different variations of the path
 			// in order to find it (without .js, with .js, etc.), so don't log any of
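To illustrate the comment in the hunk above, a hedged sketch of the path probing that a 'require'-style resolver performs (`resolveModule` and its candidate list are illustrative, not the actual resolver used by the Manager):

package job_compilers_example

import "fmt"

// resolveModule is a hypothetical illustration (not the actual 'require'
// implementation) of why individual loader failures are expected and not
// worth logging: the resolver probes several variants of the requested path
// and only gives up once none of them loads.
func resolveModule(load func(path string) ([]byte, error), request string) ([]byte, error) {
	candidates := []string{request, request + ".js"} // assumed variants
	var lastErr error
	for _, candidate := range candidates {
		content, err := load(candidate)
		if err == nil {
			return content, nil
		}
		lastErr = err // a miss here is normal, so it is not logged
	}
	return nil, fmt.Errorf("module %q not found (last error: %v)", request, lastErr)
}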
@@ -0,0 +1,6 @@
These scripts are used for some (but far from all) unit tests.

It's mostly a place where we can add some extra files for the tests (like this
`README.md` and `empty.js`) that should be ignored by the job compiler, without
them getting embedded into the `flamenco-manager` executable file in production
builds.
@@ -0,0 +1,46 @@
// SPDX-License-Identifier: GPL-3.0-or-later

/* Example job JSON:

{
  "metadata": {
    "project": "Sprite Fright",
    "user.email": "sybren@blender.org",
    "user.name": "Sybren Stüvel"
  },
  "type": "echo-sleep-test",
  "name": "pošalji poruku i idi na spavanje",
  "priority": 50,
  "settings": {
    "message": "prespavati",
    "sleep_duration_seconds": 3
  }
}

*/

const JOB_TYPE = {
  label: "Echo Sleep Test",
  settings: [
    { key: "message", type: "string", required: true },
    { key: "sleep_duration_seconds", type: "int32", default: 1 },
    { key: "sleep_repeats", type: "int32", default: 1 },
  ]
};


function compileJob(job) {
  const settings = job.settings;

  const echoTask = author.Task("echo", "misc");
  echoTask.addCommand(author.Command("echo", {message: settings.message}));
  job.addTask(echoTask);

  for (let repeat = 0; repeat < settings.sleep_repeats; repeat++) {
    const sleepTask = author.Task("sleep", "misc");
    sleepTask.addCommand(author.Command("sleep", {duration_in_seconds: settings.sleep_duration_seconds}));
    sleepTask.addDependency(echoTask); // Ensure sleeping happens after echo, and not at the same time.
    job.addTask(sleepTask);
  }
}
@@ -0,0 +1,201 @@
// SPDX-License-Identifier: GPL-3.0-or-later

const JOB_TYPE = {
  label: "Simple Blender Render",
  settings: [
    // Settings for artists to determine:
    { key: "frames", type: "string", required: true, eval: "f'{C.scene.frame_start}-{C.scene.frame_end}'",
      description: "Frame range to render. Examples: '47', '1-30', '3, 5-10, 47-327'" },
    { key: "chunk_size", type: "int32", default: 1, description: "Number of frames to render in one Blender render task" },

    // render_output_root + add_path_components determine the value of render_output_path.
    { key: "render_output_root", type: "string", subtype: "dir_path", required: true, visible: "submission",
      description: "Base directory of where render output is stored. Will have some job-specific parts appended to it" },
    { key: "add_path_components", type: "int32", required: true, default: 0, propargs: {min: 0, max: 32}, visible: "submission",
      description: "Number of path components of the current blend file to use in the render output path" },
    { key: "render_output_path", type: "string", subtype: "file_path", editable: false,
      eval: "str(Path(settings.render_output_root) / last_n_dir_parts(settings.add_path_components) / jobname / '{timestamp}' / '######')",
      description: "Final file path of where render output will be saved" },

    // Automatically evaluated settings:
    { key: "blender_cmd", type: "string", default: "{blender}", visible: "hidden" },
    { key: "blendfile", type: "string", required: true, description: "Path of the Blend file to render", visible: "hidden" },
    { key: "fps", type: "float", eval: "C.scene.render.fps / C.scene.render.fps_base", visible: "hidden" },
    {
      key: "images_or_video",
      type: "string",
      required: true,
      choices: ["images", "video"],
      visible: "hidden",
      eval: "'video' if C.scene.render.image_settings.file_format in {'FFMPEG', 'AVI_RAW', 'AVI_JPEG'} else 'images'"
    },
    { key: "format", type: "string", required: true, eval: "C.scene.render.image_settings.file_format", visible: "web" },
    { key: "image_file_extension", type: "string", required: true, eval: "C.scene.render.file_extension", visible: "hidden",
      description: "File extension used when rendering images; ignored when images_or_video='video'" },
    { key: "video_container_format", type: "string", required: true, eval: "C.scene.render.ffmpeg.format", visible: "hidden",
      description: "Container format used when rendering video; ignored when images_or_video='images'" },
  ]
};

// Set of scene.render.image_settings.file_format values that produce
// files which FFmpeg is known not to handle as input.
const ffmpegIncompatibleImageFormats = new Set([
  "EXR",
  "MULTILAYER", // Old CLI-style format indicators
  "OPEN_EXR",
  "OPEN_EXR_MULTILAYER", // DNA values for these formats.
]);

// Mapping from video container (scene.render.ffmpeg.format) to the file name
// extension typically used to store those videos.
const videoContainerToExtension = {
  "QUICKTIME": ".mov",
  "MPEG1": ".mpg",
  "MPEG2": ".dvd",
  "MPEG4": ".mp4",
  "OGG": ".ogv",
  "FLASH": ".flv",
};

function compileJob(job) {
  print("Blender Render job submitted");
  print("job: ", job);

  const renderOutput = renderOutputPath(job);
  job.settings.render_output_path = renderOutput;

  const finalDir = path.dirname(renderOutput);
  const renderDir = intermediatePath(job, finalDir);

  const settings = job.settings;
  const renderTasks = authorRenderTasks(settings, renderDir, renderOutput);
  const videoTask = authorCreateVideoTask(settings, renderDir);
  const cleanupTask = authorCleanupTask(finalDir, renderDir);

  for (const rt of renderTasks) {
    cleanupTask.addDependency(rt);
    job.addTask(rt);
  }
  if (videoTask) {
    // If there is a video task, all other tasks have to be done first.
    for (const rt of renderTasks) {
      videoTask.addDependency(rt);
    }
    cleanupTask.addDependency(videoTask);
    job.addTask(videoTask);
  }
  job.addTask(cleanupTask);
}

// Do field replacement on the render output path.
function renderOutputPath(job) {
  let path = job.settings.render_output_path;
  if (!path) {
    throw "no render_output_path setting!";
  }
  return path.replace(/{([^}]+)}/g, (match, group0) => {
    switch (group0) {
      case "timestamp":
        return formatTimestampLocal(job.created);
      default:
        return match;
    }
  });
}

// Determine the intermediate render output path.
function intermediatePath(job, finalDir) {
  const basename = path.basename(finalDir);
  const name = `${basename}__intermediate-${formatTimestampLocal(job.created)}`;
  return path.join(path.dirname(finalDir), name);
}

function authorRenderTasks(settings, renderDir, renderOutput) {
  print("authorRenderTasks(", renderDir, renderOutput, ")");
  let renderTasks = [];
  let chunks = frameChunker(settings.frames, settings.chunk_size);
  for (let chunk of chunks) {
    const task = author.Task(`render-${chunk}`, "blender");
    const command = author.Command("blender-render", {
      exe: settings.blender_cmd,
      argsBefore: [],
      blendfile: settings.blendfile,
      args: [
        "--render-output", path.join(renderDir, path.basename(renderOutput)),
        "--render-format", settings.format,
        "--render-frame", chunk.replace("-", ".."), // Convert to Blender frame range notation.
      ]
    });
    task.addCommand(command);
    renderTasks.push(task);
  }
  return renderTasks;
}

function authorCreateVideoTask(settings, renderDir) {
  if (settings.images_or_video == "video") {
    print("Not authoring video task, render output is already a video");
    return;
  }
  if (ffmpegIncompatibleImageFormats.has(settings.format)) {
    print("Not authoring video task, FFmpeg-incompatible render output");
    return;
  }
  if (!settings.fps) {
    print("Not authoring video task, no FPS known:", settings);
    return;
  }

  const stem = path.stem(settings.blendfile).replace('.flamenco', '');
  const outfile = path.join(renderDir, `${stem}-${settings.frames}.mp4`);
  const outfileExt = guessOutputFileExtension(settings);

  const task = author.Task('preview-video', 'ffmpeg');
  const command = author.Command("frames-to-video", {
    exe: "{ffmpeg}",
    fps: settings.fps,
    inputGlob: path.join(renderDir, `*${outfileExt}`),
    outputFile: outfile,
    args: [
      "-c:v", "h264",
      "-crf", "20",
      "-g", "18",
      "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2",
      "-pix_fmt", "yuv420p",
      "-r", settings.fps,
      "-y", // Be sure to always pass either "-n" or "-y".
    ],
  });
  task.addCommand(command);

  print(`Creating output video for ${settings.format}`);
  return task;
}

function authorCleanupTask(finalDir, renderDir) {
  const task = author.Task("move-to-final", "file-management");
  const command = author.Command("move-directory", {
    src: renderDir,
    dest: finalDir,
  });
  task.addCommand(command);
  return task;
}

// Return file name extension, including period, like '.png' or '.mkv'.
function guessOutputFileExtension(settings) {
  switch (settings.images_or_video) {
    case "images":
      return settings.image_file_extension;
    case "video":
      const container = settings.video_container_format;
      if (container in videoContainerToExtension) {
        return videoContainerToExtension[container];
      }
      return "." + container.toLowerCase();
    default:
      throw `invalid setting images_or_video: "${settings.images_or_video}"`;
  }
}
@@ -6,6 +6,7 @@ import (
 	"embed"
 	"fmt"
 	"io"
+	"io/fs"
 	"path"
 	"strings"

@@ -17,19 +18,32 @@ import (
 //go:embed scripts
 var scriptsFS embed.FS

 // loadScripts iterates over all JavaScript files, compiles them, and stores the
 // result into `s.compilers`.
 func (s *Service) loadScripts() error {
-	scripts, err := scriptsFS.ReadDir("scripts")
+	scriptsSubFS, err := fs.Sub(scriptsFS, "scripts")
 	if err != nil {
-		return fmt.Errorf("failed to find scripts: %w", err)
+		return fmt.Errorf("failed to find embedded 'scripts' directory: %w", err)
 	}

-	for _, script := range scripts {
-		if !strings.HasSuffix(script.Name(), ".js") {
+	return s.loadScriptsFrom(scriptsSubFS)
+}
+
+// loadScriptsFrom iterates over all given directory entries, compiles the
+// files, and stores the result into `s.compilers`.
+func (s *Service) loadScriptsFrom(filesystem fs.FS) error {
+	dirEntries, err := fs.ReadDir(filesystem, ".")
+	if err != nil {
+		return fmt.Errorf("failed to find scripts in %v: %w", filesystem, err)
+	}
+
+	for _, dirEntry := range dirEntries {
+		filename := dirEntry.Name()
+		if !strings.HasSuffix(filename, ".js") {
 			continue
 		}
-		filename := path.Join("scripts", script.Name())

-		script_bytes, err := s.loadScriptBytes(filename)
+		script_bytes, err := s.loadScriptBytes(filesystem, filename)
 		if err != nil {
 			log.Error().Err(err).Str("filename", filename).Msg("failed to read script")
 			continue
@@ -41,21 +55,24 @@ func (s *Service) loadScripts() error {
 			continue
 		}

-		jobTypeName := filenameToJobType(script.Name())
+		jobTypeName := filenameToJobType(filename)
 		s.compilers[jobTypeName] = Compiler{
 			jobType:  jobTypeName,
 			program:  program,
-			filename: script.Name(),
+			filename: filename,
 		}

-		log.Debug().Str("script", script.Name()).Str("jobType", jobTypeName).Msg("loaded script")
+		log.Debug().
+			Str("script", filename).
+			Str("jobType", jobTypeName).
+			Msg("loaded script")
 	}

 	return nil
 }

-func (s *Service) loadScriptBytes(path string) ([]byte, error) {
-	file, err := scriptsFS.Open(path)
+func (s *Service) loadScriptBytes(filesystem fs.FS, path string) ([]byte, error) {
+	file, err := filesystem.Open(path)
 	if err != nil {
 		return nil, fmt.Errorf("failed to open embedded script: %w", err)
 	}
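A side effect of switching to `fs.Sub`, visible in the hunk above, is that the sub-filesystem is rooted inside the `scripts` directory, so the old `path.Join("scripts", ...)` prefix becomes unnecessary. A minimal sketch of the difference (the `example.js` name is a placeholder, not a file from this commit):

package job_compilers_example

import (
	"embed"
	"fmt"
	"io/fs"
)

// Assumes a "scripts" directory exists next to this file.
//
//go:embed scripts
var scriptsFS embed.FS

func demoSub() {
	// Raw embed.FS: every path carries the embedded directory name.
	withPrefix, _ := scriptsFS.ReadFile("scripts/example.js")

	// fs.Sub re-roots the filesystem inside "scripts", so bare file names
	// work and path.Join("scripts", ...) is no longer needed.
	sub, err := fs.Sub(scriptsFS, "scripts")
	if err == nil {
		bare, _ := fs.ReadFile(sub, "example.js")
		fmt.Println(len(withPrefix), len(bare))
	}
}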
internal/manager/job_compilers/scripts_test.go (new file, 39 lines)
@@ -0,0 +1,39 @@
package job_compilers

import (
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestLoadScriptsFrom_skip_nonjs(t *testing.T) {
	s := Service{}

	thisDirFS := os.DirFS(".")
	assert.NoError(t, s.loadScriptsFrom(thisDirFS), "input without JS files should not cause errors")
	assert.Empty(t, s.compilers)
}

func TestLoadScriptsFrom_on_disk_js(t *testing.T) {
	s := Service{
		compilers: map[string]Compiler{},
	}

	scriptsFS := os.DirFS("scripts-for-unittest")
	assert.NoError(t, s.loadScriptsFrom(scriptsFS))
	expectKeys := map[string]bool{
		"echo-and-sleep":        true,
		"simple-blender-render": true,
	}
	assert.Equal(t, expectKeys, keys(s.compilers))
}

// keys returns the set of keys of the mapping.
func keys[K comparable, V any](mapping map[K]V) map[K]bool {
	keys := map[K]bool{}
	for k := range mapping {
		keys[k] = true
	}
	return keys
}