Automatically evaluate hidden job settings
Job settings that are not visible and have an `eval` key will be automatically evaluated when the job is submitted.
parent 4e570d601c
commit e15f066dde
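In practice this means a job-type setting marked `visible: false` that carries an `eval` expression gets its value computed at submission time, with the Blender context exposed as `C` (and the `bpy` module as `bpy`) inside the expression; a hidden setting without an `eval` expression falls back to its declared default. Below is a minimal sketch of that idea, runnable outside Blender with a stand-in context object; the names are illustrative, not the add-on's API.

from types import SimpleNamespace

# A hidden setting as it might appear in a job type definition (compare the
# "Automatically evaluated settings" entries in the JavaScript further down).
setting = {
    "key": "fps",
    "type": "float",
    "visible": False,
    "eval": "C.scene.render.fps / C.scene.render.fps_base",
}

# Stand-in for bpy.types.Context; inside Blender the eval globals would also
# expose the real `bpy` module.
context = SimpleNamespace(
    scene=SimpleNamespace(render=SimpleNamespace(fps=24, fps_base=1.0))
)

job_settings = {}
if not setting.get("visible", True):
    # Hidden setting with an eval expression: evaluate it and store the result.
    value = eval(setting["eval"], {"C": context}, {})
    job_settings[setting["key"]] = value

print(job_settings)  # {'fps': 24.0}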
@@ -68,6 +68,32 @@ def set_blend_file(
     job.settings[BLENDFILE_SETTING_KEY] = str(blendfile)
 
 
+def eval_hidden_settings(
+    context: bpy.types.Context, job_type: _AvailableJobType, job: _SubmittedJob
+) -> None:
+    """Assign values to settings hidden from the UI.
+
+    If the setting has an `eval` property, it'll be evaluated and used as the
+    setting value. Otherwise the default is used.
+    """
+    for setting in job_type.settings:
+        if setting.get("visible", True):
+            # Skip those settings that will be visible in the GUI.
+            continue
+
+        setting_eval = setting.get("eval", "")
+        if setting_eval:
+            value = JobTypePropertyGroup.eval_setting(context, setting_eval)
+        elif "default" in setting:
+            value = setting.default
+        else:
+            # No way to get a default value, so just don't bother overwriting
+            # anything.
+            continue
+
+        job.settings[setting.key] = value
+
+
 def submit_job(job: _SubmittedJob, api_client: _ApiClient) -> _Job:
     """Send the given job to Flamenco Manager."""
     from flamenco.manager import ApiClient
@@ -47,15 +47,23 @@ class JobTypePropertyGroup:
 
         return js
 
-    def eval_setting(
+    def eval_and_assign(
         self, context: bpy.types.Context, setting_key: str, setting_eval: str
     ) -> None:
+        """Evaluate `setting_eval` and assign the result to the job setting."""
+        value = self.eval_setting(context, setting_eval)
+        setattr(self, setting_key, value)
+
+    @staticmethod
+    def eval_setting(context: bpy.types.Context, setting_eval: str) -> Any:
+        """Evaluate `setting_eval` and return the result."""
         eval_globals = {
             "bpy": bpy,
             "C": context,
         }
         value = eval(setting_eval, eval_globals, {})
-        setattr(self, setting_key, value)
+        return value
 
 
 # Mapping from AvailableJobType.setting.type to a callable that converts a value
@@ -116,7 +116,7 @@ class FLAMENCO_OT_eval_setting(FlamencoOpMixin, bpy.types.Operator):
     def execute(self, context: bpy.types.Context) -> set[str]:
         propgroup = context.scene.flamenco_job_settings
-        propgroup.eval_setting(context, self.setting_key, self.setting_eval)
+        propgroup.eval_and_assign(context, self.setting_key, self.setting_eval)
         return {"FINISHED"}
 
 
@@ -261,11 +261,15 @@ class FLAMENCO_OT_submit_job(FlamencoOpMixin, bpy.types.Operator):
     def _submit_job(self, context: bpy.types.Context) -> None:
         """Use the Flamenco API to submit the new Job."""
         assert self.job is not None
+        assert self.blendfile_on_farm is not None
 
-        job_type = job_types.active_job_type(context.scene)
-        job_submission.set_blend_file(job_type, self.job, self.blendfile_on_farm)
-
         api_client = self.get_api_client(context)
+
+        job_type = job_types.active_job_type(context.scene)
+        assert job_type is not None  # If we're here, the job type should be known.
+
+        job_submission.set_blend_file(job_type, self.job, self.blendfile_on_farm)
+        job_submission.eval_hidden_settings(context, job_type, self.job)
         job = job_submission.submit_job(self.job, api_client)
 
         self.report({"INFO"}, "Job %s submitted" % job.name)
@@ -23,10 +23,11 @@ func exampleSubmittedJob() api.SubmittedJob {
 		"chunk_size": 3,
 		"extract_audio": true,
 		"format": "PNG",
-		"fps": 24,
+		"fps": 24.0,
 		"frames": "1-10",
 		"images_or_video": "images",
-		"output_file_extension": ".png",
+		"image_file_extension": ".png",
+		"video_container_format": "",
 		"render_output": "/render/sprites/farm_output/promo/square_ellie/square_ellie.lighting_light_breakdown2/######",
 	}}
 	metadata := api.JobMetadata{
@@ -3,13 +3,15 @@
 const JOB_TYPE = {
     label: "Simple Blender Render",
     settings: [
-        { key: "blender_cmd", type: "string", default: "{blender}", visible: false },
-        { key: "blendfile", type: "string", required: true, description: "Path of the Blend file to render", visible: false },
+        // Settings for artists to determine:
         { key: "chunk_size", type: "int32", default: 1, description: "Number of frames to render in one Blender render task" },
         { key: "frames", type: "string", required: true, eval: "f'{C.scene.frame_start}-{C.scene.frame_end}'"},
         { key: "render_output", type: "string", subtype: "hashed_file_path", required: true },
-        { key: "fps", type: "float", eval: "C.scene.render.fps / C.scene.render.fps_base" },
-        { key: "extract_audio", type: "bool", default: true },
+
+        // Automatically evaluated settings:
+        { key: "blender_cmd", type: "string", default: "{blender}", visible: false },
+        { key: "blendfile", type: "string", required: true, description: "Path of the Blend file to render", visible: false },
+        { key: "fps", type: "float", eval: "C.scene.render.fps / C.scene.render.fps_base", visible: false },
         {
             key: "images_or_video",
             type: "string",
@@ -18,8 +20,11 @@ const JOB_TYPE = {
             visible: false,
             eval: "'video' if C.scene.render.image_settings.file_format in {'FFMPEG', 'AVI_RAW', 'AVI_JPEG'} else 'image'"
         },
-        { key: "format", type: "string", required: true, eval: "C.scene.render.image_settings.file_format" },
-        { key: "output_file_extension", type: "string", required: true },
+        { key: "format", type: "string", required: true, eval: "C.scene.render.image_settings.file_format", visible: false },
+        { key: "image_file_extension", type: "string", required: true, eval: "C.scene.render.file_extension", visible: false,
+            description: "File extension used when rendering images; ignored when images_or_video='video'" },
+        { key: "video_container_format", type: "string", required: true, eval: "C.scene.render.ffmpeg.format", visible: false,
+            description: "Container format used when rendering video; ignored when images_or_video='images'" },
     ]
 };
 
@@ -33,6 +38,17 @@ const ffmpegIncompatibleImageFormats = new Set([
     "OPEN_EXR_MULTILAYER", // DNA values for these formats.
 ]);
 
+// Mapping from video container (scene.render.ffmpeg.format) to the file name
+// extension typically used to store those videos.
+const videoContainerToExtension = {
+    "QUICKTIME": ".mov",
+    "MPEG1": ".mpg",
+    "MPEG2": ".dvd",
+    "MPEG4": ".mp4",
+    "OGG": ".ogv",
+    "FLASH": ".flv",
+};
+
 function compileJob(job) {
     print("Blender Render job submitted");
     print("job: ", job);
@@ -91,21 +107,25 @@ function authorRenderTasks(settings, renderDir, renderOutput) {
 }
 
 function authorCreateVideoTask(settings, renderDir) {
+    if (settings.images_or_video == "video") {
+        print("Not authoring video task, render output is already a video");
+    }
     if (ffmpegIncompatibleImageFormats.has(settings.format)) {
         print("Not authoring video task, FFmpeg-incompatible render output")
        return;
     }
-    if (!settings.fps || !settings.output_file_extension) {
-        print("Not authoring video task, no FPS or output file extension setting:", settings)
+    if (!settings.fps) {
+        print("Not authoring video task, no FPS known:", settings);
         return;
     }
 
     const stem = path.stem(settings.blendfile).replace('.flamenco', '');
     const outfile = path.join(renderDir, `${stem}-${settings.frames}.mp4`);
+    const outfileExt = guessOutputFileExtension(settings);
 
     const task = author.Task('create-video', 'ffmpeg');
     const command = author.Command("create-video", {
-        input_files: path.join(renderDir, `*${settings.output_file_extension}`),
+        input_files: path.join(renderDir, `*${outfileExt}`),
         output_file: outfile,
         fps: settings.fps,
     });
@@ -114,3 +134,16 @@ function authorCreateVideoTask(settings, renderDir) {
     print(`Creating output video for ${settings.format}`);
     return task;
 }
+
+// Return file name extension, including period, like '.png' or '.mkv'.
+function guessOutputFileExtension(settings) {
+    if (settings.images_or_video == "images") {
+        return settings.image_file_extension;
+    }
+
+    container = scene.render.ffmpeg.format
+    if (container in videoContainerToExtension) {
+        return videoContainerToExtension[container];
+    }
+    return "." + container.lower();
+}
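For reference, the fallback that guessOutputFileExtension() implements can be sketched in Python. This is an illustrative stand-in, not part of the commit: it assumes the container name is taken from the job's `video_container_format` setting (which this commit adds and evaluates from `C.scene.render.ffmpeg.format`), and `guess_output_file_extension` is a hypothetical name.

VIDEO_CONTAINER_TO_EXTENSION = {
    "QUICKTIME": ".mov",
    "MPEG1": ".mpg",
    "MPEG2": ".dvd",
    "MPEG4": ".mp4",
    "OGG": ".ogv",
    "FLASH": ".flv",
}


def guess_output_file_extension(settings: dict) -> str:
    """Return the file name extension, including the period, like '.png' or '.mkv'."""
    if settings["images_or_video"] == "images":
        return settings["image_file_extension"]

    container = settings.get("video_container_format", "")
    # Containers without a known mapping fall back to the lower-cased container name.
    return VIDEO_CONTAINER_TO_EXTENSION.get(container, "." + container.lower())


# A Matroska render has no entry in the mapping, so the fallback yields '.mkv'.
print(guess_output_file_extension(
    {"images_or_video": "video", "video_container_format": "MKV"}
))  # '.mkv'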