Hi there,
I have been making a HDA that contains a python script in the 'python module' section. The goal is to wire multiple mantra nodes into this single HDA and then correctly generate multiple setups inside the HDA that configure the rendering setup to be handled by our farm running Afanacy.
The tool is working pretty nicely on its own, but once I put multiple nodes in the OUT network and generate a second one it also affects the other nodes. I thought this was because I had global variables in my python script, so I got rid of them and made a 'dataObject' class that is generated on the single nodes and passed down to all functions.
however, this did not solve the issue. I figure it has something to do with scope but can't figure out what it is. I'll put the code down here below. Feel free to give any tips, I don't have much experience with Houdini python in particular.
import hou
import os
import re
from os import path
alphabet = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']
class dataObject:
renderasset = hou.Node
base_ifd_generator = hou.Node
base_distributed_renderer = hou.Node
base_afanacy_merger = hou.Node
master_merger = hou.Node
mantra_duplicates = [] # List of all mantra nodes that will be compied from inputs. Ordered by input index
ifd_generators = [] # List of all Afanacy nodes that are configured as IFD generators
distributed_renderers = [] # List of all afanacy nodes that are configured as renderers
afanacy_mergers = [] # List of all afanacy nodes that are only used for merging the different stages.
def reset(data):
for mantra in data.mantra_duplicates:
mantra.destroy()
for ifd in data.ifd_generators:
ifd.destroy()
for renderer in data.distributed_renderers:
renderer.destroy()
for merger in data.afanacy_mergers:
merger.destroy()
del data.mantra_duplicates[:]
del data.ifd_generators[:]
del data.distributed_renderers[:]
del data.afanacy_mergers[:]
def getOptions(data):
autoversioning = data.renderasset.parm('autoversion').eval()
print("automatic versioning set to; " + str(autoversioning))
def generate(thisnode):
data = dataObject()
data.renderasset = hou.node(thisnode.path())
print(data.renderasset.name())
data.base_ifd_generator = hou.node(thisnode.path()).glob("ifd_generator_SETUP")
data.base_distributed_renderer = hou.node(thisnode.path()).glob("distributed_renderer_SETUP")
data.base_afanacy_merger = hou.node(thisnode.path()).glob("afanasy_merge_SETUP")
data.master_merger = hou.node(thisnode.path()).glob("*MASTER_MERGE*")
reset(data)
getOptions(data)
configureRenderers(data)
print("Internal network for node '" + data.renderasset.name() + "' has been generated.")
def submit():
getOptions()
size = len(data.mantra_duplicates)
if size < 1:
print("WARNING: render asset should first generate an internal network before submitting.")
else:
checkVersions()
data.master_merger[0].parm('submit').pressButton()
def configureRenderers(data):
data.master_merger[0].setParms({
"job_name" : data.renderasset.parm('jobname').eval()
})
inputs = data.renderasset.inputs()
print("name: " + str(data.renderasset.name()))
for idx,value in enumerate(inputs):
configureRenderSetup(data,idx, value)
connectRenderSetup(data,idx)
setIfdSettings(data,idx)
setRendererSettings(data,idx)
setMergerSettings(data,idx)
layoutNetwork(data,idx)
connectMasterRenderer(data)
data.renderasset.layoutChildren()
def computeVersion(outputPath, patternName):
if path.exists(outputPath):
ifd_directories = os.listdir(outputPath)
pattern = re.compile(patternName)
version = 0
for dir in ifd_directories:
if pattern.search(dir):
version += 1
return alphabet[version]
def checkVersions(data):
for idx, mantra in enumerate(data.mantra_duplicates):
render_version = computeVersion(data.renderasset.parm('renderroot').eval(), data.renderasset.inputs()[idx].name())
ifd_version = render_version
ifd_destination = data.renderasset.parm('ifdroot').eval() + "/" + "ifd_" + data.renderasset.inputs()[idx].name() + "_" + ifd_version + "/" + data.renderasset.inputs()[idx].name() + "_ifd.$F4.ifd"
img_destination = data.renderasset.parm('renderroot').eval() + data.renderasset.inputs()[idx].name() + "_" + render_version + "/" + data.renderasset.inputs()[idx].name() + ".$F4.exr"
mantra.setParms(
{
"soho_diskfile" : ifd_destination,
"vm_picture" : img_destination
})
def configureRenderSetup(data,inputIndex, node):
data.mantra_duplicates.append(node.copyTo(data.renderasset))
data.mantra_duplicates[inputIndex].setName("MantraInput_" + str(inputIndex))
render_version = "_" + str(computeVersion(data.renderasset.parm('renderroot').eval(), data.renderasset.inputs()[inputIndex].name()))
ifd_version = render_version
ifd_destination = data.renderasset.parm('ifdroot').eval() + "/" + "ifd_" + data.renderasset.inputs()[inputIndex].name() + "/" + data.renderasset.inputs()[inputIndex].name() + "_ifd.$F4.ifd"
img_destination = data.renderasset.parm('renderroot').eval() + data.renderasset.inputs()[inputIndex].name() + "/" + data.renderasset.inputs()[inputIndex].name() + ".$F4.exr"
if bool(data.renderasset.parm('autoversion').eval()):
print("autoversioning")
ifd_destination = data.renderasset.parm('ifdroot').eval() + "/" + "ifd_" + data.renderasset.inputs()[inputIndex].name() + ifd_version + "/" + data.renderasset.inputs()[inputIndex].name() + "_ifd.$F4.ifd"
img_destination = data.renderasset.parm('renderroot').eval() + data.renderasset.inputs()[inputIndex].name() + render_version + "/" + data.renderasset.inputs()[inputIndex].name() + ".$F4.exr"
data.mantra_duplicates[inputIndex].setParms(
{
"soho_outputmode":1,
"soho_diskfile" : ifd_destination,
"vm_inlinestorage" : 1,
"vm_writecheckpoint" : 0,
"vm_picture" : img_destination
})
data.ifd_generators.append(data.base_ifd_generator[0].copyTo(data.renderasset))
data.distributed_renderers.append(data.base_distributed_renderer[0].copyTo(data.renderasset))
data.afanacy_mergers.append(data.base_afanacy_merger[0].copyTo(data.renderasset))
def connectRenderSetup(data, inputIndex):
data.ifd_generators[inputIndex].setInput(0,data.mantra_duplicates[inputIndex])
data.afanacy_mergers[inputIndex].setInput(0,data.ifd_generators[inputIndex])
data.afanacy_mergers[inputIndex].setInput(1,data.distributed_renderers[inputIndex])
def setIfdSettings(data, inputIndex):
data.ifd_generators[inputIndex].setParms(
{
"f1" : data.mantra_duplicates[inputIndex].parm('f1'),
"f2" : data.mantra_duplicates[inputIndex].parm('f2'),
"enable_extended_parameters" : 1,
"hosts_mask" : data.renderasset.parm('ifdhosts').eval(),
"priority" : data.renderasset.parm('priority').eval()
})
def setRendererSettings(data, inputIndex):
data.distributed_renderers[inputIndex].setParms(
{
"f1" : data.mantra_duplicates[inputIndex].parm('f1'),
"f2" : data.mantra_duplicates[inputIndex].parm('f2'),
"cmd_files" : data.mantra_duplicates[inputIndex].parm('soho_diskfile'),
"enable_extended_parameters" : 1,
"hosts_mask" : data.renderasset.parm('renderhostmask').eval(),
"priority" : data.renderasset.parm('priority').eval()
})
def setMergerSettings(data, inputIndex):
data.afanacy_mergers[inputIndex].setParms(
{
"f1" : data.mantra_duplicates[inputIndex].parm('f1'),
"f2" : data.mantra_duplicates[inputIndex].parm('f2'),
"enable_extended_parameters" : 1,
"hosts_mask" : data.renderasset.parm('renderhostmask').eval(),
"priority" : data.renderasset.parm('priority').eval(),
"job_name" : data.renderasset.inputs()[inputIndex].name() + "_afanacy_renderer"
})
def layoutNetwork(data, inputIndex):
data.mantra_duplicates[inputIndex].moveToGoodPosition()
data.renderasset.layoutChildren()
def connectMasterRenderer(data):
for i,v in enumerate(data.afanacy_mergers):
print("i: " + str(i))
print("v: " + str(v.name()))
print("mm: " + str(data.master_merger[0].name()))
data.master_merger[0].setInput(i,v)