Add files via upload
Initial commit of ABX development sources. The abx package itself is in the abx folder. The MakeAddOnZip.py script creates the archive to install into Blender in the pkg folder. The tests folder contains unit tests which require the contents of testdata as fixtures.
|
@ -0,0 +1,11 @@
|
|||
# Bootstrap script run inside Blender to attach the PyDev remote debugger
# (Eclipse/PyDev) and then execute the ABX entry script under it.

#script to run:
SCRIPT="/project/terry/Dev/eclipse-workspace/ABX/src/abx.py"

#path to the PyDev folder that contains a file named pydevd.py:
PYDEVD_PATH='/home/terry/.eclipse/360744294_linux_gtk_x86_64/plugins/org.python.pydev.core_7.3.0.201908161924/pysrc/'

#PYDEVD_PATH='/home/terry/.config/blender/2.79/scripts/addons/modules/pydev_debug.py'

import pydev_debug as pydev #pydev_debug.py is in a folder from Blender PYTHONPATH

# Run SCRIPT under pydevd; trace=True turns on breakpoint tracing so the
# Eclipse debug client can step through the add-on code.
pydev.debug(SCRIPT, PYDEVD_PATH, trace = True)
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env python
# Run the script in the debugger client within Blender:

import subprocess

# Launch Blender and have it execute the remote-debug bootstrap script
# (which attaches the PyDev debugger and runs ABX under it).
command = [
    'blender',
    '-P', '/project/terry/Dev/eclipse-workspace/ABX/BlenderRemoteDebug.py',
]
subprocess.call(command)
|
|
@ -0,0 +1,64 @@
|
|||
#!/usr/bin/env python
"""
MakeAddOnZip.py

Utility script to package ABX into the "abx-##.##.zip" file needed for Installation
in Blender.
"""
import subprocess, os

import bpy, bpy.utils

import abx

# Extra packaging-level tag appended to the add-on's own version tuple.
VERSION_PKG = ('a',)
VERSION = abx.bl_info['version'] + VERSION_PKG

#VERSION = (0,1,2,'a')

#AODIR = 'abx%d%d%d%s' % VERSION # Addon directory name for Blender
AODIR = 'abx'
PKGNAME = 'abx-%d.%d.%d%s' % VERSION   # Package name for ZIP file

# PROJDIR is the project directory, one above the source tree, where my associated
# stuff lives: documentation, management scripts, etc.
# Normally this script is in it, and so the directory of __file__ is what I want.
# But not if I'm testing this code out on the console!
try:
    # Normally if I'm running from a script, I want the directory the script is in
    PROJDIR = os.path.dirname(os.path.abspath(__file__))
except NameError:
    # __file__ is undefined when this code is pasted into an interactive
    # console, so fall back to the present working directory.
    # (The original bare "except:" also swallowed SystemExit and
    # KeyboardInterrupt; NameError is the only failure expected here.)
    # Hopefully, at that point, I'm smart enough to have set it correctly!
    PROJDIR = os.getcwd()

PKGDIR = os.path.join(PROJDIR, 'pkg')  # Directory used for building packages.

print( "VERSION: %d.%d.%d%s" % VERSION)
print( "PACKAGE DIRECTORY: ", PKGDIR)
print( "WORKING DIRECTORY: ", PROJDIR)

# Clean out any previous build, then re-create the staging directory.
subprocess.run(('rm', '-rf', AODIR), cwd=PKGDIR)
subprocess.run(('rm', PKGNAME+'.zip'), cwd=PKGDIR)
subprocess.run(('mkdir', AODIR), cwd=PKGDIR)

# Collect only source/config files of known types from the abx source tree.
files = os.listdir(os.path.join(PROJDIR, 'abx'))
pkg_files = []
for ext in ('.py', '.yaml', '.cfg'):
    pkg_files.extend([
        os.path.abspath(os.path.join(PROJDIR, 'abx', f))
        for f in files if f.endswith(ext)])

# Stage the files and zip the staged add-on directory.
subprocess.run(('cp',) + tuple(pkg_files) + (
        os.path.join(PKGDIR, AODIR),), cwd=PROJDIR)
subprocess.run(('zip', '-r', PKGNAME+'.zip', AODIR), cwd=PKGDIR)

# TODO: It would be good to clean the copied source tree, to get rid of unwanted files
# or else I could make the copy operation more selective. As it is, I'm packaging
# a lot of unnecessary files.
|
@ -0,0 +1,38 @@
|
|||
|
||||
# Blender add-on metadata for ABX ("Anansi Studio Extensions for Blender").
bl_info = {
    "name": "ABX",
    "author": "Terry Hancock / Lunatics.TV Project / Anansi Spaceworks",
    "version": (0, 2, 6),
    "blender": (2, 79, 0),
    "location": "SpaceBar Search -> ABX",
    "description": "Anansi Studio Extensions for Blender",
    "warning": "",
    "wiki_url": "",
    "tracker_url": "",
    "category": "Object",
    }

# Detect whether we are actually running inside Blender. The management
# scripts (e.g. MakeAddOnZip.py) import this module under the standard
# O/S Python 3 just to read bl_info, so the bpy import must not be fatal.
blender_present = False
try:
    import bpy, bpy.utils, bpy.types
    blender_present = True
except ImportError:
    print("Blender Add-On 'ABX' requires the Blender Python environment to run.")

print("blender_present = ", blender_present)

# The UI module uses bpy heavily, so only load it inside Blender.
if blender_present:
    from . import abx_ui

def register():
    """Register every class in this add-on with Blender (2.79 bulk API)."""
    bpy.utils.register_module(__name__)

def unregister():
    """Remove every class in this add-on from Blender."""
    bpy.utils.unregister_module(__name__)

if __name__ == "__main__":
    register()
|
|
@ -0,0 +1,72 @@
|
|||
# DEFAULT ABX SETTINGS
#
# NOTE: the original file had stray trailing commas after several scalar
# values (e.g. "name: PreViz,", "freestyle: False,"). YAML is not JSON:
# a comma after an unquoted scalar becomes part of the string (turning
# booleans into strings like "False,"), and a comma after a quoted scalar
# is a parse error. All of them are removed here.
---
project_schema:
    - rank: project
      delimiter: '-'
      words: True
      type: string

    - rank: sequence
      type:
          VN: Vague Name

    - rank: shot
      type: letter
      maxlength: 1

    - rank: element
      type: string
      maxlength: 2

render_profiles:
    previz:
        name: PreViz
        desc: 'GL/AVI Previz Render for Animatics'
        engine: gl
        version: any
        fps: 30
        fps_div: 1000
        fps_skip: 1
        suffix: GL
        format: AVI_JPEG
        extension: avi
        freestyle: False

    quick:
        name: 30fps Paint
        desc: '30fps Simplified Paint-Only Render'
        engine: bi
        fps: 30
        fps_skip: 3
        suffix: PT
        format: AVI_JPEG
        extension: avi
        freestyle: False
        antialias: False
        motionblur: False

    check:
        name: 1fps Check
        desc: '1fps Full-Features Check Renders'
        engine: bi
        fps: 30
        fps_skip: 30
        suffix: CH
        format: JPEG
        extension: jpg
        framedigits: 5
        freestyle: True
        antialias: 8

    full:
        name: 30fps Full
        desc: 'Full Render with all Features Turned On'
        engine: bi
        fps: 30
        fps_skip: 1
        suffix: ''
        format: PNG
        extension: png
        framedigits: 5
        freestyle: True
        antialias: 8
|
@ -0,0 +1,530 @@
|
|||
# Anansi Studio Extensions for Blender 'ABX'
|
||||
"""
|
||||
Collection of Blender extension tools to make our jobs easier.
|
||||
This is not really meant to be an integrated plugin, but rather
|
||||
a collection of useful scripts we can run to solve problems we
|
||||
run into.
|
||||
"""
|
||||
#
|
||||
#Copyright (C) 2019 Terry Hancock
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#
|
||||
|
||||
|
||||
|
||||
import os
|
||||
|
||||
import bpy, bpy.utils, bpy.types, bpy.props
|
||||
|
||||
from . import copy_anim
|
||||
from . import std_lunatics_ink
|
||||
from . import render_profile
|
||||
|
||||
|
||||
#configfile = os.path.join(os.path.dirname(__file__), 'config.yaml')
|
||||
|
||||
#print("Configuration file path: ", os.path.abspath(configfile))
|
||||
|
||||
# Lunatics Scene Panel
|
||||
|
||||
# Lunatics file/scene properties:
|
||||
|
||||
# TODO: This hard-coded table is a temporary solution until I have figured
# out a good way to look these up from the project files (maybe YAML?):
#
# Maps (series, episode) -> {sequence code: description}. The None entry
# is the fallback (a bare list of IDs) used when the scene's current
# series/episode pair is not in the table.
seq_id_table = {
    ('S1', 0): {'':'', 'mt':'Main Title'},
    ('S1', 1): {'':'',
                'TR':'Train',
                'SR':'Soyuz Rollout',
                'TB':'Touring Baikonur',
                'PC':'Press Conference',
                'SU':'Suiting Up',
                'LA':'Launch',
                'SF':'Soyuz Flight',
                'mt':'Main Title',
                'ad':'Ad Spot',
                'pv':'Preview',
                'et':'Episode Titles',
                'cr':'Credits'
                },
    ('S1', 2): {'':'',
                'MM':'Media Montage',
                'mt':'Main Title',
                'et':'Episode Titles',
                'SS':'Space Station',
                'LC':'Loading Cargo',
                'TL':'Trans Lunar Injection',
                'BT':'Bed Time',
                'ad':'Ad Spot',
                'pv':'Preview',
                'cr':'Credits'
                },
    ('S1', 3): {'':'',
                'mt':'Main Title',
                'et':'Episode Titles',
                'ZG':'Zero G',
                'LI':'Lunar Injection',
                'LO':'Lunar Orbit',
                'ML':'Moon Landing',
                'IR':'Iridium',
                'TC':'Touring Colony',
                'FD':'Family Dinner',
                'ad':'Ad Spot',
                'pv':'Preview',
                'cr':'Credits'
                },
    ('S2', 0): {'':'', 'mt':'Main Title'},
    ('L', 0): {'':'',
               'demo':'Demonstration',
               'prop':'Property',
               'set': 'Set',
               'ext': 'Exterior Set',
               'int': 'Interior Set',
               'prac':'Practical',
               'char':'Character',
               'fx': 'Special Effect',
               'stock': 'Stock Animation'
               },
    None: ['']
    }


def get_seq_ids(self, context):
    """
    EnumProperty items callback: sequence IDs valid for the current scene.

    Looks up the scene's (series, episode) pair in seq_id_table and returns
    Blender enum items as a list of (identifier, name, description) tuples.

    Note: To avoid the reference bug mentioned in the Blender documentation,
    we only return values held in the global seq_id_table, which
    should remain defined and therefore hold a reference to the strings.
    """
    if not context:
        seq_ids = seq_id_table[None]
    else:
        scene = context.scene
        series = scene.lunaprops.series_id
        episode = scene.lunaprops.episode_id
        seq_ids = seq_id_table.get((series, episode), seq_id_table[None])

    # FIX: the original comprehension indexed seq_id_table[series, episode]
    # directly, which raised NameError when context was falsy (series was
    # never assigned) and KeyError when the pair was absent from the table
    # -- i.e. exactly the cases the fallback entry exists for. Build the
    # enum items from the looked-up entry itself instead.
    if isinstance(seq_ids, dict):
        seq_enum_items = [(s, s, seq_ids[s]) for s in seq_ids]
    else:
        # The fallback entry is a bare list of IDs with no descriptions.
        seq_enum_items = [(s, s, s) for s in seq_ids]
    return seq_enum_items
|
||||
|
||||
# Another hard-coded table -- for render profiles
#
# Each entry bundles the render settings applied by RenderProfilesOperator;
# the keys are the identifiers offered by the RenderProfileSettings enum.
# NOTE(review): this table overlaps the 'render_profiles' section of the
# default abx.yaml but the values differ (e.g. 'format' codes) -- presumably
# the YAML version is meant to replace this eventually; confirm before
# consolidating.
render_profile_table = {
    'previz': {
        'name': 'PreViz',
        'desc': 'GL/AVI Previz Render for Animatics',
        'engine':'gl',
        'version':'any',
        'fps': 30,
        'fps_div': 1000,
        'fps_skip': 1,
        'suffix': 'GL',
        'format': 'AVI',
        'freestyle': False
        },

    'paint6': {
        'name': '6fps Paint',
        'desc': '6fps Simplified Paint-Only Render',
        'engine':'bi',
        'fps': 30,
        'fps_skip': 5,
        'suffix': 'P6',
        'format': 'AVI',
        'freestyle': False,
        'antialias': False,
        'motionblur': False
        },

    'paint3': {
        'name': '3fps Paint',
        'desc': '3fps Simplified Paint-Only Render',
        'engine': 'bi',
        'fps': 30,
        'fps_skip': 10,
        'suffix': 'P3',
        'format': 'AVI',
        'freestyle': False,
        'antialias': False,
        'motionblur': False,
        },

    'paint': {
        'name': '30fps Paint',
        'desc': '30fps Simplified Paint-Only Render',
        'engine': 'bi',
        'fps': 30,
        'fps_skip': 1,
        'suffix': 'PT',
        'format': 'AVI',
        'freestyle': False,
        'antialias': False,
        'motionblur': False
        },

    'check': {
        'name': '1fps Check',
        'desc': '1fps Full-Features Check Renders',
        'engine': 'bi',
        'fps': 30,
        'fps_skip': 30,
        'suffix': 'CH',
        'format': 'JPG',
        'framedigits': 5,
        'freestyle': True,
        'antialias': 8
        },

    'full': {
        'name': '30fps Full',
        'desc': 'Full Render with all Features Turned On',
        'engine': 'bi',
        'fps': 30,
        'fps_skip': 1,
        'suffix': '',
        'format': 'PNG',
        'framedigits': 5,
        'freestyle': True,
        'antialias': 8
        },
    }
|
||||
|
||||
|
||||
class LunaticsSceneProperties(bpy.types.PropertyGroup):
    """
    Properties of the current scene.

    Identifies where a scene fits in the 'Lunatics!' project hierarchy:
    series -> episode -> sequence -> block -> (camera) -> shot.
    """
    # Series/season (or the asset library) this scene belongs to.
    series_id = bpy.props.EnumProperty(
        items=[
            ('S1', 'S1', 'Series One'),
            ('S2', 'S2', 'Series Two'),
            ('S3', 'S3', 'Series Three'),
            ('A1', 'Aud','Audiodrama'),
            ('L', 'Lib','Library')
            ],
        name="Series",
        default='S1',
        description="Series/Season of Animated Series, Audiodrama, or Library"
        )

    episode_id = bpy.props.IntProperty(
        name="Episode",
        default=0,
        description="Episode number (0 means multi-use), ignored for Library",
        min=0,
        max=1000,
        soft_max=18
        )

    # Sequence choices depend on series/episode -- see the get_seq_ids()
    # items callback defined above.
    seq_id = bpy.props.EnumProperty(
        name='',
        items=get_seq_ids,
        description="Sequence ID"
        )

    block_id = bpy.props.IntProperty(
        name='',
        default=1,
        min=0,
        max=20,
        soft_max=10,
        description="Block number"
        )

    use_multicam = bpy.props.BoolProperty(
        name="Multicam",
        default=False,
        description="Use multicam camera/shot numbering?"
        )

    # Only shown in the panel when use_multicam is enabled.
    cam_id = bpy.props.IntProperty(
        name="Cam",
        default=0,
        min=0,
        max=20,
        soft_max=10,
        description="Camera number"
        )

    shot_id = bpy.props.EnumProperty(
        name='Shot',
        #items=[('NONE', '', 'Single')]+[(c,c,'Shot '+c) for c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'],
        items=[(c,c,'Shot '+c) for c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'],
        default='A',
        description="Shot ID, normally a single capital letter, can be empty, two letters for transitions"
        )

    shot_name = bpy.props.StringProperty(
        name='Name',
        description='Short descriptive codename',
        maxlen=0
        )

# Register immediately at import time (Blender 2.79 style) and attach the
# group to every Scene as 'scene.lunaprops'.
bpy.utils.register_class(LunaticsSceneProperties)
bpy.types.Scene.lunaprops = bpy.props.PointerProperty(type=LunaticsSceneProperties)
|
||||
|
||||
class LunaticsScenePanel(bpy.types.Panel):
    """
    Add a panel to the Properties-Scene screen
    """
    bl_idname = 'SCENE_PT_lunatics'
    bl_label = 'Lunatics Project'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'scene'

    def draw(self, context):
        # Widgets for the scene's project identification -- backed by the
        # LunaticsSceneProperties group attached as scene.lunaprops.
        lunaprops = bpy.context.scene.lunaprops
        self.layout.label(text='Lunatics! Project Properties')
        row = self.layout.row()
        row.prop(lunaprops, 'series_id')
        row.prop(lunaprops, 'episode_id')
        row = self.layout.row()
        row.prop(lunaprops, 'use_multicam')
        row = self.layout.row()
        row.prop(lunaprops, 'seq_id')
        row.prop(lunaprops, 'block_id')
        # Camera number is only meaningful with multicam numbering on.
        if lunaprops.use_multicam:
            row.prop(lunaprops, 'cam_id')
        row.prop(lunaprops, 'shot_id')
        row.prop(lunaprops, 'shot_name')
|
||||
# Buttons
|
||||
|
||||
class RenderProfileSettings(bpy.types.PropertyGroup):
    """
    Settings for Render Profiles control.
    """
    # Which entry of render_profile_table to apply; the enum items are
    # generated from that table so the two always stay in sync.
    render_profile = bpy.props.EnumProperty(
        name='Profile',
        items=[(k, v['name'], v['desc'])
               for k,v in render_profile_table.items()],
        description="Select from pre-defined profiles of render settings",
        default='full')

# Register immediately and attach to every Scene as
# 'scene.render_profile_settings'.
bpy.utils.register_class(RenderProfileSettings)
bpy.types.Scene.render_profile_settings = bpy.props.PointerProperty(
    type=RenderProfileSettings)
|
||||
|
||||
class RenderProfilesOperator(bpy.types.Operator):
    """
    Operator invoked implicitly when render profile is changed.
    """
    bl_idname = 'render.render_profiles'
    bl_label = 'Apply Render Profile'
    bl_options = {'UNDO'}

    def invoke(self, context, event):
        # Look up the profile selected in the scene settings and delegate
        # applying it to the render_profile helper module.
        scene = context.scene
        profile = render_profile_table[scene.render_profile_settings.render_profile]

        render_profile.set_render_from_profile(scene, profile)

        return {'FINISHED'}
|
||||
|
||||
class RenderProfilesPanel(bpy.types.Panel):
    """
    Add simple drop-down selector for generating common render settings with
    destination set according to project defaults.
    """
    bl_idname = 'SCENE_PT_render_profiles'
    bl_label = 'Render Profiles'
    bl_space_type = 'PROPERTIES'
    bl_region_type = 'WINDOW'
    bl_context = 'render'

    def draw(self, context):
        # Profile selector plus a button that fires RenderProfilesOperator.
        rps = bpy.context.scene.render_profile_settings
        row = self.layout.row()
        row.prop(rps, 'render_profile')
        row = self.layout.row()
        row.operator('render.render_profiles')
|
||||
|
||||
|
||||
class copy_animation(bpy.types.Operator):
    """
    Copy animation from active object to selected objects (select source last!).

    Useful for fixing broken proxy rigs (create a new proxy, and used this tool
    to copy all animation from the original -- avoids tedious/error-prone NLA work).

    Can also migrate to a re-scaled rig.
    """
    bl_idname = 'object.copy_anim'
    bl_label = 'Copy Animation'
    bl_options = {'UNDO'}

    def invoke(self, context, event):
        # The active object is the animation source; every other selected
        # object is a potential target.
        source = context.active_object
        targets = [ob for ob in context.selected_objects
                   if ob != context.active_object]

        # TODO
        # Are these type checks necessary?
        # Is there any reason to restrict this operator to armature objects?
        # I think there isn't.

        if source.type != 'ARMATURE':
            self.report({'WARNING'}, 'Cannot copy NLA data from object that is not an ARMATURE.')
            return {'CANCELLED'}

        armature_targets = [ob for ob in targets if ob.type == 'ARMATURE']
        if not armature_targets:
            self.report({'WARNING'}, 'No armature objects selected to copy animation data to.')
            return {'CANCELLED'}

        # Options come from the copy_animation_settings group on the scene.
        options = context.scene.copy_anim_settings
        copy_anim.copy_object_animation(source, armature_targets,
            dopesheet=options.dopesheet,
            nla=options.nla,
            rescale=options.rescale,
            scale_factor=options.scale_factor,
            report=self.report)

        return {'FINISHED'}
|
||||
|
||||
class copy_animation_settings(bpy.types.PropertyGroup):
    """
    Settings for the 'copy_animation' operator.
    """
    # Copy keyframe (Dope Sheet) animation?
    dopesheet = bpy.props.BoolProperty(
        name = "Dope Sheet",
        description = "Copy animation from Dope Sheet",
        default=True)

    # Copy NLA strips as well?
    nla = bpy.props.BoolProperty(
        name = "NLA Strips",
        description = "Copy all strips from NLA Editor",
        default=True)

    # When True, actions are duplicated and rescaled instead of linked.
    rescale = bpy.props.BoolProperty(
        name = "Re-Scale/Copy",
        description = "Make rescaled COPY of actions instead of LINK to original",
        default = False)

    scale_factor = bpy.props.FloatProperty(
        name = "Scale",
        description = "Scale factor for scaling animation (Re-Scale w/ 1.0 copies actions)",
        default = 1.0)

# Register immediately and attach to every Scene as 'scene.copy_anim_settings'.
bpy.utils.register_class(copy_animation_settings)
bpy.types.Scene.copy_anim_settings = bpy.props.PointerProperty(type=copy_animation_settings)
|
||||
|
||||
class CharacterPanel(bpy.types.Panel):
    """3D View tool-shelf panel exposing the Copy Animation operator and its options."""
    bl_space_type = "VIEW_3D" # window type panel is displayed in
    bl_context = "objectmode"
    bl_region_type = "TOOLS" # region of window panel is displayed in
    bl_label = "Character"
    bl_category = "ABX"

    def draw(self, context):
        # Button plus the copy_animation_settings toggles from the scene.
        settings = bpy.context.scene.copy_anim_settings
        layout = self.layout.column(align = True)
        layout.label("Animation Data")
        layout.operator('object.copy_anim')
        layout.prop(settings, 'dopesheet')
        layout.prop(settings, 'nla')
        layout.prop(settings, 'rescale')
        layout.prop(settings, 'scale_factor')
|
||||
|
||||
|
||||
|
||||
|
||||
class lunatics_compositing_settings(bpy.types.PropertyGroup):
    """
    Settings for the LX compositor tool.
    """
    # Render Freestyle ink on a transparent layer so it can overlay paint.
    inkthru = bpy.props.BoolProperty(
        name = "Ink-Thru",
        description = "Support transparent Freestyle ink effect",
        default=True)

    billboards = bpy.props.BoolProperty(
        name = "Billboards",
        description = "Support material pass for correct billboard inking",
        default = False)

    sepsky = bpy.props.BoolProperty(
        name = "Separate Sky",
        description = "Render sky separately with compositing support (better shadows)",
        default = True)

# Register immediately and attach to every Scene as 'scene.lx_compos_settings'.
bpy.utils.register_class(lunatics_compositing_settings)
bpy.types.Scene.lx_compos_settings = bpy.props.PointerProperty(type=lunatics_compositing_settings)
|
||||
|
||||
class lunatics_compositing(bpy.types.Operator):
    """
    Set up standard Lunatics scene compositing.
    """
    bl_idname = "scene.lunatics_compos"
    bl_label = "Ink/Paint Config"
    bl_options = {'UNDO'}
    bl_description = "Set up standard Lunatics Ink/Paint compositing in scene"

    def invoke(self, context, event):
        """
        Add standard 'Lunatics!' shot compositing to the currently-selected scene.
        """
        scene = context.scene

        # Delegate the node/renderlayer construction to std_lunatics_ink,
        # passing the feature toggles from the scene's LX settings group.
        shot = std_lunatics_ink.LunaticsShot(scene,
            inkthru=context.scene.lx_compos_settings.inkthru,
            billboards=context.scene.lx_compos_settings.billboards,
            sepsky=context.scene.lx_compos_settings.sepsky )

        shot.cfg_scene()

        return {'FINISHED'}

#     def draw(self, context):
#         settings = context.scene.lx_compos_settings
#         self.col = self.layout.col()
#         col.prop(settings, "inkthru", text="Ink Thru")
#         col.prop(settings, "billboards", text="Ink Thru")
|
||||
|
||||
|
||||
|
||||
class LunaticsPanel(bpy.types.Panel):
    """3D View tool-shelf panel for the Lunatics Ink/Paint compositing tool."""
    bl_space_type = "VIEW_3D"
    bl_context = "objectmode"
    bl_region_type = "TOOLS"
    bl_label = "Lunatics"
    bl_category = "ABX"

    def draw(self, context):
        # Button plus the lunatics_compositing_settings toggles.
        settings = bpy.context.scene.lx_compos_settings
        layout = self.layout.column(align = True)
        layout.label("Compositing")
        layout.operator('scene.lunatics_compos')
        layout.prop(settings, 'inkthru', text="Ink-Thru")
        layout.prop(settings, 'billboards', text="Billboards")
        layout.prop(settings, 'sepsky', text="Separate Sky")
|
||||
|
||||
|
||||
def register():
    """Register all classes in this module with Blender (2.79 bulk API)."""
    bpy.utils.register_module(__name__)

def unregister():
    """Remove all classes in this module from Blender."""
    bpy.utils.unregister_module(__name__)

if __name__ == "__main__":
    register()
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,336 @@
|
|||
# accumulate.py
|
||||
"""
|
||||
Data structures for accumulating tree-structured data from multiple sources.
|
||||
|
||||
Data is acquired from file and directory names and also from yaml files in the
|
||||
tree. The yaml files are loaded in increasing priority from upper directories
|
||||
to the local one, starting from the highest level file to contain a "project_root"
|
||||
key.
|
||||
|
||||
The files named for their parent directory are assumed to be KitCAT files (i.e.
|
||||
"kitcat.yaml" and "<dirname>.yaml" are treated the same way). Only files named
|
||||
"abx.yaml" are assumed to be configuration files specific to ABX.
|
||||
|
||||
We collect these by going up the file path, and then load them coming down. If
|
||||
we find a "project_root" key, we ditch the previous data and start over. This way
|
||||
any project files found above the project root will be ignored.
|
||||
|
||||
As a use case: if we were to store a new project inside of another project, the
|
||||
new project's project_root would make it blind to the settings in the containing
|
||||
project. Other directories in the parent project would still go to the parent
|
||||
project's root. This avoids having the location the project is stored affect
|
||||
the project data.
|
||||
|
||||
The overall structure is a dictionary. When updating with new data, any element
|
||||
that is itself a dictionary is treated recursively (that is, it is updated with
|
||||
directory data when another dictionary is provided for the same key). If an
|
||||
element is a list, then data from successively-higher directories extends the
|
||||
list (see UnionList, below). If a scalar replaces a dictionary or list value in
|
||||
a more specific entry, then it clobbers it and any updated information in it.
|
||||
|
||||
@author: Terry Hancock
|
||||
|
||||
@copyright: 2019 Anansi Spaceworks.
|
||||
|
||||
@license: GNU General Public License, version 2.0 or later. (Python code)
|
||||
|
||||
@contact: digitante@gmail.com
|
||||
|
||||
Demo:
|
||||
|
||||
>>> import accumulate
|
||||
>>> T1 = accumulate.RecursiveDict(accumulate.TEST_DICT_1)
|
||||
>>> T2 = accumulate.RecursiveDict(accumulate.TEST_DICT_2)
|
||||
>>> import copy
|
||||
>>> Ta = copy.deepcopy(T1)
|
||||
>>> Tb = copy.deepcopy(T2)
|
||||
>>> Ta
|
||||
RecursiveDict({'A': 1, 'B': [1, 2, 3], 'C': {'a': 1, 'b': 2, 'c': 3}, 'D': {}, 'E': None, 'F': {'h': {'i': {'j': {'k': 'abcdefghijk'}}}}})
|
||||
>>> Tb
|
||||
RecursiveDict({'C': {'d': 4, 'e': 5, 'f': 6}, 'D': (1, 2, 3), 'B': [4, 5, 6], 'E': 0})
|
||||
>>> Ta.update(T2)
|
||||
>>> Ta
|
||||
RecursiveDict({'A': 1, 'B': [4, 5, 6, 1, 2, 3], 'C': {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6}, 'D': (1, 2, 3), 'E': 0, 'F': {'h': {'i': {'j': {'k': 'abcdefghijk'}}}}})
|
||||
>>> Tb.update(T1)
|
||||
>>> Tb
|
||||
RecursiveDict({'C': {'d': 4, 'e': 5, 'f': 6, 'a': 1, 'b': 2, 'c': 3}, 'D': {}, 'B': [1, 2, 3, 4, 5, 6], 'E': None, 'A': 1, 'F': {'h': {'i': {'j': {'k': 'abcdefghijk'}}}}})
|
||||
>>>
|
||||
|
||||
"""
|
||||
|
||||
# Fixture data exercised by the doctest demo in the module docstring.
TEST_DICT_1 = { 'A':1,
                'B':[1,2,3],
                'C':{'a':1, 'b':2, 'c':3},
                'D':{},
                'E':None,
                'F':{'h':{'i':{'j':{'k':'abcdefghijk'}}}},
                }

TEST_DICT_2 = { 'C':{'d':4, 'e':5, 'f':6},
                'D':(1,2,3),
                'B':[4,5,6],
                'E':0
                }

# YAML rendering of TEST_DICT_1 after being updated with TEST_DICT_2
# (yaml.dump style: sequence items at key level, mappings indented).
YAML_TEST = """
A: 1
B:
- 4
- 5
- 6
- 1
- 2
- 3
C:
  a: 1
  b: 2
  c: 3
  d: 4
  e: 5
  f: 6
D: (1, 2, 3)
E: 0
F:
  h:
    i:
      j:
        k: abcdefghijk
"""

import os, collections.abc, re
import yaml

# Splits identifier-ish names into words: runs of capitals (optionally
# followed by lowercase), lowercase runs, or digit runs.
wordre = re.compile(r'([A-Z]+[a-z]*|[a-z]+|[0-9]+)')
|
||||
|
||||
class OrderedSet(collections.abc.Set):
    """
    Set backed by a list, so iteration preserves first-insertion order.

    Adapted from the example in the Python documentation for
    collections.abc; the abstract Set mixin supplies the operators
    (|, &, -, comparisons) on top of the three core methods below.
    """
    def __init__(self, iterable):
        # Keep only the first occurrence of each value, in the order seen.
        members = self.elements = []
        for item in iterable:
            if item not in members:
                members.append(item)

    def __iter__(self):
        return iter(self.elements)

    def __contains__(self, value):
        return value in self.elements

    def __len__(self):
        return len(self.elements)

    def __repr__(self):
        return repr(list(self))

    def union(self, other):
        # Named alias for the inherited "or" operator.
        return self | other

    def intersection(self, other):
        # Named alias for the inherited "and" operator.
        return self & other
|
||||
|
||||
class UnionList(list):
    """
    Special list-based collection, which implements a "union" operator similar
    to the one defined for sets. It only adds options from the other list
    which are not already in the current list.

    Note that it is intentionally asymmetric. The initial list may repeat values
    and they will be kept, so it does not require the list to consist only of
    unique entries (unlike Set collections).

    This allows us to use this type for loading list-oriented data from data
    files, which may or may not contain repetitions for different uses, but
    also makes accumulation idempotent (running the union twice will not
    increase the size of the result, because no new values will be found).
    """
    def union(self, other):
        """Return a new UnionList: self followed by the novel elements of other."""
        combined = UnionList(self)
        for element in other:
            if element not in self:
                combined.append(element)
        return combined

class RecursiveDict(collections.OrderedDict):
    """
    A dictionary which updates recursively, updating any values which are
    themselves dictionaries when the replacement value is a dictionary, rather
    than replacing them, and treating any values which are themselves lists
    as UnionLists and applying the union operation to combine them
    (when the replacement value is also a list).
    """
    def clear(self):
        # BUG FIX: the original iterated the live dict while deleting from
        # it, which raises RuntimeError ("dictionary changed size during
        # iteration") for any dict with more than one key. Snapshot the
        # keys first.
        for key in list(self):
            del self[key]

    def update(self, mapping):
        """Merge mapping into self: dicts recurse, lists union, scalars clobber."""
        for key in mapping:
            if key in self:
                if (isinstance(self[key], collections.abc.Mapping) and
                    isinstance(mapping[key], collections.abc.Mapping)):
                    # Subdictionary -- merge recursively.
                    newvalue = RecursiveDict(self[key])
                    newvalue.update(RecursiveDict(mapping[key]))
                    self[key] = newvalue

                elif ((isinstance(self[key], collections.abc.MutableSequence) or
                       isinstance(self[key], collections.abc.Set)) and
                      (isinstance(mapping[key], collections.abc.MutableSequence) or
                       isinstance(mapping[key], collections.abc.Set))):
                    # Sublist -- extend with values not already present.
                    self[key] = UnionList(self[key]).union(UnionList(mapping[key]))

                else: # scalar
                    self[key] = mapping[key]

            else: # new key
                self[key] = mapping[key]

    def get_data(self):
        """Return a plain-dict/plain-list copy of the accumulated data."""
        new = {}
        for key in self:
            if isinstance(self[key], RecursiveDict):
                new[key]=dict(self[key].get_data())
            elif isinstance(self[key], UnionList):
                new[key]=list(self[key])
            else:
                new[key]=self[key]
        return new

    def __setitem__(self, key, value):
        # Coerce incoming mappings/lists to the recursive types so later
        # updates merge instead of clobbering.
        if isinstance(value, collections.abc.Mapping):
            super().__setitem__(key, RecursiveDict(value))

        elif isinstance(value, collections.abc.MutableSequence):
            super().__setitem__(key, UnionList(value))

        else:
            super().__setitem__(key,value)

    def __repr__(self, compact=False):
        # compact=True renders nested RecursiveDicts without the class name.
        s = ''
        if not compact:
            s = s + '%s(' % self.__class__.__name__
        s = s + '{'
        for key in self:
            if isinstance(self[key], RecursiveDict):
                s = s+"'%s'"%key + ': ' + "%s" % self[key].__repr__(compact=True) + ', '
            else:
                s = s+ "'%s'"%key + ': ' + "%s" % repr(self[key]) + ', '
        if s.endswith(', '): s= s[:-2]
        s = s + '}'
        if not compact:
            s = s + ')'
        return s

    def from_yaml(self, yaml_string):
        """Merge data parsed from a YAML string; returns self for chaining."""
        self.update(yaml.safe_load(yaml_string))
        return self

    def from_yaml_file(self, path):
        """Merge data parsed from a YAML file; returns self for chaining."""
        with open(path, 'rt') as yamlfile:
            self.update(yaml.safe_load(yamlfile))
        return self

    def to_yaml(self):
        """Serialize the accumulated data to a YAML string."""
        return yaml.dump(self.get_data())

    def to_yaml_file(self, path):
        """Serialize the accumulated data to a YAML file at path."""
        with open(path, 'wt') as yamlfile:
            yamlfile.write(yaml.dump(self.get_data()))
|
||||
|
||||
|
||||
#--------
|
||||
# Code for collecting the YAML files we need
|
||||
|
||||
|
||||
def collect_yaml_files(path, stems, dirmatch=False, sidecar=False, root='/'):
    """
    Collect a list of file paths to YAML files.

    Does not attempt to read or interpret the files; the result is ordered
    from the top of the tree downward (lowest priority first).

    @path: The starting point, typically the antecedent filename.
    @stems: File stem (or sequence of stems) we recognize (in priority order).
    @dirmatch: Also search for stems matching the containing directory name?
    @sidecar: Also search for stems matching the antecedent filename's stem?
    @root: Top level directory to consider (do not search above this).

    "Stem" means the name with any extension after "." removed (typically,
    the filetype).
    """
    if type(stems) is str:
        stems = (stems,)

    found = []
    dirname, filename = os.path.split(os.path.abspath(path))

    if sidecar:
        # A YAML file sitting beside the antecedent file, named after it.
        filestem = os.path.splitext(filename)[0]
        candidate = os.path.join(dirname, filestem + '.yaml')
        if os.path.isfile(candidate):
            found.append(candidate)

    # Walk upward one directory at a time until we reach the parent of root.
    stop = os.path.dirname(root)
    while os.path.abspath(dirname) != stop:
        dirname, base = os.path.split(dirname)

        names = list(stems)
        if dirmatch:
            # A YAML file named after its containing directory takes
            # priority over the recognized stems.
            names.insert(0, base)

        for name in names:
            candidate = os.path.join(dirname, base, name + '.yaml')
            if os.path.isfile(candidate):
                found.append(candidate)

    # Deepest files were found first; callers want top-down order.
    found.reverse()
    return found
|
||||
|
||||
|
||||
def has_project_root(yaml_path):
    """
    Return True if the YAML file at *yaml_path* contains a 'project_root' key.

    Bug fix: yaml.safe_load returns None for an empty document, and
    `'project_root' in None` raises TypeError; treat an empty document
    as "no project root" instead of crashing.
    """
    with open(yaml_path, 'rt') as yaml_file:
        data = yaml.safe_load(yaml_file)
    return data is not None and 'project_root' in data
|
||||
|
||||
def trim_to_project_root(yaml_paths):
    """
    Drop paths above the deepest file that declares 'project_root'.

    Scans from the end of the list (deepest file) upward; if no file
    declares a project root, the list is returned unchanged.
    """
    for index in reversed(range(len(yaml_paths))):
        if has_project_root(yaml_paths[index]):
            return yaml_paths[index:]
    return yaml_paths
|
||||
|
||||
def get_project_root(yaml_paths):
    """
    Return the directory containing the project-root YAML file.

    Falls back to '/' when the trimmed list is empty (no root found).
    """
    trimmed = trim_to_project_root(yaml_paths)
    if not trimmed:
        # No project root was found!
        return '/'
    return os.path.dirname(trimmed[0])
|
||||
|
||||
def combine_yaml(yaml_paths):
    """
    Accumulate the contents of several YAML files into one RecursiveDict.

    Files are merged in list order via RecursiveDict.update.
    """
    combined = RecursiveDict()
    for yaml_path in yaml_paths:
        with open(yaml_path, 'rt') as stream:
            loaded = yaml.safe_load(stream)
        combined.update(loaded)
    return combined
|
||||
|
||||
def get_project_data(filepath):
    """
    Gather KitCAT and ABX YAML data relevant to *filepath*.

    Returns a (kitcat_root, kitcat_data, abx_data) tuple: the project
    root directory, the combined KitCAT/project data, and the combined
    ABX settings found within that root.
    """
    # First, get the KitCAT data.
    kitcat_paths = collect_yaml_files(
        filepath, ('kitcat', 'project'), dirmatch=True, sidecar=True)

    kitcat_root = get_project_root(kitcat_paths)
    kitcat_data = combine_yaml(trim_to_project_root(kitcat_paths))

    # ABX settings are only collected within the KitCAT project tree.
    abx_paths = collect_yaml_files(filepath, 'abx', root=kitcat_root)
    abx_data = combine_yaml(abx_paths)

    return kitcat_root, kitcat_data, abx_data
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,169 @@
|
|||
# blender_context.py
|
||||
"""
|
||||
Contextual metadata acquired from internal values in a Blender file.
|
||||
|
||||
This module must be invoked from within Blender to work, as it relies on the bpy Blender API
|
||||
module and the currently-open Blender file's data graph in order to work.
|
||||
|
||||
It collects data about scenes, objects, groups, and other datablocks in the Blender file,
|
||||
as well as data encoded in text blocks in different formats. Overall file data is incorporated
|
||||
into a PropertyGroup attached to the "WindowManager" object identified as 'WinMan' (normally,
|
||||
it appears there is only ever one of these in a Blender file, but if there is more than one, this
|
||||
is the one that will be used).
|
||||
"""
|
||||
|
||||
import io
|
||||
import bpy, bpy.app, bpy.props, bpy.utils
|
||||
from bpy.app.handlers import persistent
|
||||
from accumulate import UnionList, RecursiveDict
|
||||
import yaml
|
||||
|
||||
def EnumFromList(schema, listname):
    """
    Build Blender EnumProperty 'items' tuples from a schema list.

    Each entry becomes (identifier, name, description), with the
    capitalized entry used for both name and description.
    """
    items = []
    for entry in schema[listname]:
        label = entry.capitalize()
        items.append((entry, label, label))
    return items
|
||||
|
||||
# Map schema 'type' names to the Blender property constructor that
# implements each, plus:
#   'keywords':  the bpy.props keyword arguments we allow through, and
#   'translate': schema key -> (bpy keyword, optional filter callable);
#                a callable filter is applied as filter(schema, value).
prop_types = {
    'string':{
        'property': bpy.props.StringProperty,
        'keywords': { 'name', 'description', 'default', 'maxlen', 'options', 'subtype'},
        'translate': {
            'desc': ('description', None)}},
    'enum': {
        'property': bpy.props.EnumProperty,
        'keywords': { 'items', 'name', 'description', 'default', 'options'},
        'translate': {
            'desc': ('description', None),
            # 'items_from' names a schema list converted via EnumFromList.
            'items_from': ('items', EnumFromList)}},
    'int': {
        'property': bpy.props.IntProperty,
        'keywords': { 'name', 'description', 'default', 'min', 'max', 'soft_min', 'soft_max',
            'step', 'options', 'subtype'},
        'translate': {
            'desc': ('description', None)}},
    'float': {
        'property': bpy.props.FloatProperty,
        'keywords': { 'name', 'description', 'default', 'min', 'max', 'soft_min', 'soft_max',
            'step', 'options', 'subtype', 'precision', 'unit'},
        'translate': {
            'desc': ('description', None)}},
    'bool': {
        'property': bpy.props.BoolProperty,
        'keywords': { 'name', 'description', 'default', 'options', 'subtype'},
        'translate': {
            'desc': ('description', None)}}
    }
|
||||
|
||||
class AbxMeta(bpy.types.PropertyGroup):
    """
    Metadata property group factory for attachment to Blender object types.
    Definitions come from a YAML source (or default defined below).
    """
    # Built-in fallback schema used when the caller supplies none.
    # (YAML indentation reconstructed here; confirm against original file.)
    default_schema = yaml.safe_load(io.StringIO("""\
---
blender:
    - id: project
      type: string
      level: project
      name: Project Name
      desc: Name of the project
      maxlen: 32

    - id: project_title
      type: string
      level: project
      name: Project Title
      desc: Full title for the project
      maxlen: 64

    - id: project_description
      type: string
      level: project
      name: Project Description
      desc: Brief description of the project
      maxlen: 128

    - id: project_url
      type: list string
      level: project
      name: Project URL
      desc: URL for Project home page, or comma-separated list of Project URLs

    - id: level
      type: enum
      items_from: levels
      name: Level
      desc: Level of the file in the project hierarchy

levels:
    - project
    - series
    - episode
    - seq
    - subseq
    - camera
    - shot
    - element
    - frame

hierarchies:
    - library
    - episodes
"""))

    def __new__(cls, schema=default_schema):
        # Dynamically build and register a PropertyGroup subclass whose
        # properties are generated from the schema's 'blender' entries.
        class CustomPropertyGroup(bpy.types.PropertyGroup):
            pass
        for definition in schema['blender']:
            # Translate and filter parameters
            try:
                propmap = prop_types[definition['type']]
            except KeyError:
                # If no 'type' specified or 'type' not found, default to string:
                propmap = prop_types['string']

            filtered = {}
            for param in definition:
                # NOTE(review): only params present in propmap['translate']
                # are ever copied into `filtered`; plain keywords such as
                # 'name', 'default' and 'maxlen' are silently dropped and
                # never reach kwargs below — looks like a missing else
                # branch; confirm intent.
                if 'translate' in propmap and param in propmap['translate']:
                    filter = propmap['translate'][param][1]
                    if callable(filter):
                        # Filtered translation
                        filtered[propmap['translate'][param][0]] = filter(schema, definition[param])
                    else:
                        # Simple translation
                        filtered[propmap['translate'][param][0]] = definition[param]

            # Create the Blender Property object
            kwargs = dict((key,filtered[key]) for key in propmap['keywords'] if key in filtered)
            setattr(CustomPropertyGroup, definition['id'], propmap['property'](**kwargs))

        bpy.utils.register_class(CustomPropertyGroup)
        return(CustomPropertyGroup)
|
||||
|
||||
|
||||
|
||||
class BlenderContext(RecursiveDict):
    """
    Dictionary accumulating data from sources within the currently-open Blender file.
    """
    filepath = ''   # path of the open .blend file (class-level default)
    defaults = {}   # initial contents restored by clear()

    def __init__(self):
        # Start from a clean state seeded with the class defaults.
        self.clear()

    @classmethod
    def update(cls):
        # NOTE(review): this zero-argument classmethod shadows
        # dict.update(); clear() below calls self.update(self.defaults),
        # which will resolve to this method and raise TypeError —
        # confirm intent.
        try:
            cls.file_metadata = bpy.data.window_managers['WinMan'].metadata
        except AttributeError:
            # NOTE(review): FileMeta is not defined in this module; this
            # fallback raises NameError if ever reached — verify.
            bpy.data.window_managers['WinMan'].new(FileMeta())

    def clear(self):
        # NOTE(review): deleting keys while iterating the dict raises
        # RuntimeError in Python 3; iterating list(self) (or using
        # dict.clear) would be safe — confirm and fix.
        for key in self:
            del self[key]
        self.update(self.defaults)
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
# context.py
"""
Combines context sources to create AbxContext object (dictionary tree).
"""

import os

import bpy, bpy.app, bpy.data, bpy.ops

from bpy.app.handlers import persistent
#from accumulate import UnionList, RecursiveDict

from . import file_context

# Bug fix: os.path.exists was used below but 'os' was never imported,
# which made this module raise NameError on import.
if os.path.exists(bpy.data.filepath):
    BlendfileContext = file_context.FileContext(bpy.data.filepath)
else:
    BlendfileContext = file_context.FileContext()

# Attach a handler to keep our filepath context up to date with Blender
@persistent
def update_handler(ctxt):
    """Refresh the file context from the current .blend filepath."""
    BlendfileContext.update(bpy.data.filepath)

bpy.app.handlers.save_post.append(update_handler)
bpy.app.handlers.load_post.append(update_handler)
bpy.app.handlers.scene_update_post.append(update_handler)
|
||||
|
|
@ -0,0 +1,126 @@
|
|||
# copy_anim.py
|
||||
"""
|
||||
Blender Python code to copy animation between armatures or proxy armatures.
|
||||
"""
|
||||
|
||||
import bpy, bpy.types, bpy.utils, bpy.props
|
||||
|
||||
#----------------------------------------
|
||||
## TOOLS
|
||||
# This might be moved into another module later
|
||||
|
||||
def copy_object_animation(sourceObj, targetObjs,
        dopesheet=False, nla=False, rescale=False, scale_factor=1.0,
        report=print):
    """
    Copy Dope Sheet & NLA editor animation from active object to selected objects.
    Most useful with armatures. Assumes bones match. Can be rescaled in the process.

    @sourceObj:    object whose animation_data is copied.
    @targetObjs:   iterable of objects that receive the animation.
    @dopesheet:    copy the active (dope sheet) action?
    @nla:          copy NLA tracks and strips?
    @rescale:      rescale bone-location keys by scale_factor while copying?
    @scale_factor: factor used when rescale is True.
    @report:       Blender operator-style report callable (defaults to print).

    From StackExchange post:
    https://blender.stackexchange.com/questions/74183/how-can-i-copy-nla-tracks-from-one-armature-to-another
    """
    for targetObj in targetObjs:
        # Start each target from a clean animation slate.
        if targetObj.animation_data is not None:
            targetObj.animation_data_clear()

        targetObj.animation_data_create()

        source_animation_data = sourceObj.animation_data
        target_animation_data = targetObj.animation_data

        # copy the dopesheet animation (active animation)
        if dopesheet:
            report({'INFO'}, 'Copying Dopesheet animation')
            if source_animation_data.action is None:
                # Source has no active action: clear the target, but keep
                # its old action alive via 'fake user' so it isn't purged.
                report({'WARNING'},
                    "CLEARING target dope sheet - old animation saved with 'fake user'")
                if target_animation_data.action is not None:
                    target_animation_data.action.use_fake_user = True
                target_animation_data.action = None
            else:
                if rescale:
                    target_animation_data.action = copy_animation_action_with_rescale(
                        source_animation_data.action, scale_factor)
                else:
                    # Bug fix: both branches previously rescaled by
                    # scale_factor even when rescale was False. Copy with
                    # a no-op factor of 1.0 so the rename below cannot
                    # touch the source object's action.
                    target_animation_data.action = copy_animation_action_with_rescale(
                        source_animation_data.action, 1.0)

                target_animation_data.action.name = targetObj.name + 'Action'

        if nla:
            report({'INFO'}, 'Copying NLA strips')
            if source_animation_data:
                # Create new NLA tracks based on the source
                for source_nla_track in source_animation_data.nla_tracks:
                    target_nla_track = target_animation_data.nla_tracks.new()
                    target_nla_track.name = source_nla_track.name
                    # In each track, create action strips base on the source
                    for source_action_strip in source_nla_track.strips:

                        if rescale:
                            new_action = copy_animation_action_with_rescale(
                                source_action_strip.action, scale_factor)
                        else:
                            new_action = source_action_strip.action

                        target_action_strip = target_nla_track.strips.new(
                            new_action.name,
                            source_action_strip.frame_start,
                            new_action)

                        # For each strip, copy the properties -- EXCEPT the ones we
                        # need to protect or can't copy
                        # introspect property names (is there a better way to do this?)
                        props = [p for p in dir(source_action_strip) if
                            not p in ('action',)
                            and not p.startswith('__') and not p.startswith('bl_')
                            and source_action_strip.is_property_set(p)
                            and not source_action_strip.is_property_readonly(p)
                            and not source_action_strip.is_property_hidden(p)]
                        for prop in props:
                            setattr(target_action_strip, prop, getattr(source_action_strip, prop))
|
||||
|
||||
|
||||
# Adapted from reference:
|
||||
# https://www.reddit.com/r/blender/comments/eu3w6m/guide_how_to_scale_a_rigify_rig/
|
||||
#
|
||||
|
||||
def reset_armature_stretch_constraints(rig_object):
    """
    Zero the rest_length of every STRETCH_TO constraint on an armature's
    pose bones — necessary after rescaling a rig.

    Returns the number of constraints that were reset.
    """
    reset_count = 0
    for pose_bone in rig_object.pose.bones:
        stretch_constraints = [c for c in pose_bone.constraints
                               if c.type == "STRETCH_TO"]
        for constraint in stretch_constraints:
            constraint.rest_length = 0
            reset_count += 1
    return reset_count
|
||||
|
||||
|
||||
def rescale_animation_action_in_place(action, scale_factor):
    """
    Multiply bone-location keyframe values of *action* by *scale_factor*,
    in place.

    Only F-curves whose data path is a pose-bone '.location' channel are
    touched (keyframe value and both handles); other channels are left
    alone. Returns the (mutated) action.
    """
    def _is_bone_location(path):
        # Matches paths like: pose.bones["Name"].location
        return path.startswith('pose.bones[') and path.endswith('].location')

    for fcurve in action.fcurves:
        if not _is_bone_location(fcurve.data_path):
            continue
        for keyframe in fcurve.keyframe_points:
            keyframe.co[1] *= scale_factor
            keyframe.handle_left[1] *= scale_factor
            keyframe.handle_right[1] *= scale_factor
    return action
|
||||
|
||||
def copy_animation_action_with_rescale(action, scale_factor):
    """
    Duplicate *action* and rescale the copy's bone-location keys.

    NOTE(review): the rename strips the copy's last 4 characters,
    assuming Blender appended a '.001'-style suffix in copy() — confirm
    that assumption holds for all action names.
    """
    duplicate = action.copy()
    duplicate.name = duplicate.name[:-4] + '.rescale'
    return rescale_animation_action_in_place(duplicate, scale_factor)
|
||||
|
||||
|
||||
|
||||
|
||||
#----------------------------------------
|
|
@ -0,0 +1,83 @@
|
|||
# render_profile.py
|
||||
"""
|
||||
Blender Python code to set parameters based on render profiles.
|
||||
"""
|
||||
|
||||
import bpy, bpy.types, bpy.utils, bpy.props
|
||||
|
||||
from . import std_lunatics_ink
|
||||
|
||||
# Keys are the profile-level format codes accepted by render profiles;
# values map onto Blender's image_settings.file_format identifiers.
render_formats = {
    # VERY simplified and limited list of formats from Blender that we need:
    # <API 'format'>: (<bpy file format>, <filename extension>),
    'PNG': ('PNG', 'png'),
    'JPG': ('JPEG', 'jpg'),
    'EXR': ('OPEN_EXR_MULTILAYER', 'exr'),
    'AVI': ('AVI_JPEG', 'avi'),
    'MKV': ('FFMPEG', 'mkv')
    }
|
||||
|
||||
|
||||
def set_render_from_profile(scene, profile):
    """
    Apply a render-profile dictionary to a Blender scene.

    @scene:   Blender scene whose render settings are modified in place.
    @profile: dict of settings; all keys optional:
        engine       'gl' (no engine change), 'bi', 'cycles', or 'bge'
        fps          frames per second
        fps_skip     frame step (render every Nth frame)
        format       key into render_formats (default 'PNG')
        freestyle    enable Freestyle line rendering?
        antialias    falsy to disable, or a sample count (5/8/11/16)
        motionblur   falsy to disable, or an int sample count
        framedigits  width of frame-number padding (default 5)
        suffix       filename suffix (default '')

    Side effect: also sets scene.render.filepath using the Lunatics
    naming scheme via std_lunatics_ink.LunaticsShot.
    """
    # Profile engine codes -> Blender engine identifiers.
    # 'gl' means an OpenGL preview render: no engine change is needed.
    engine_map = {
        'bi': 'BLENDER_RENDER',
        'cycles': 'CYCLES',
        'bge': 'BLENDER_GAME',
        }
    if 'engine' in profile:
        engine = profile['engine']
        if engine in engine_map:
            scene.render.engine = engine_map[engine]

    if 'fps' in profile:
        scene.render.fps = profile['fps']

    if 'fps_skip' in profile:
        scene.frame_step = profile['fps_skip']

    # Resolve the output format once (it was previously looked up twice,
    # here and again for the render path below).
    if 'format' in profile:
        rdr_fmt, ext = render_formats[profile['format']]
        scene.render.image_settings.file_format = rdr_fmt
    else:
        rdr_fmt, ext = 'PNG', 'png'

    if 'freestyle' in profile:
        scene.render.use_freestyle = profile['freestyle']

    if 'antialias' in profile:
        if profile['antialias']:
            scene.render.use_antialiasing = True
            # Blender only accepts these specific sample counts:
            if profile['antialias'] in (5, 8, 11, 16):
                scene.render.antialiasing_samples = str(profile['antialias'])
        else:
            scene.render.use_antialiasing = False

    if 'motionblur' in profile:
        if profile['motionblur']:
            scene.render.use_motion_blur = True
            # Only an explicit integer sets the sample count.
            if type(profile['motionblur']) == int:
                scene.render.motion_blur_samples = profile['motionblur']
        else:
            scene.render.use_motion_blur = False

    # Use Lunatics naming scheme for render target:
    framedigits = profile.get('framedigits', 5)
    suffix = profile.get('suffix', '')

    path = std_lunatics_ink.LunaticsShot(scene).render_path(
        suffix=suffix, framedigits=framedigits, ext=ext, rdr_fmt=rdr_fmt)

    scene.render.filepath = path
|
||||
|
||||
|
|
@ -0,0 +1,678 @@
|
|||
# std_lunatics_ink.py
|
||||
"""
|
||||
Functions to set up the standard ink and paint compositing arrangement
|
||||
for "Lunatics"
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import bpy, bpy.props, bpy.utils
|
||||
|
||||
# Hard-coded default parameters:
INK_THICKNESS = 3             # passed to cfg_ink() as thickness for the main ink layer
INK_COLOR = (0,0,0)           # passed to cfg_ink() as color for the main ink layer
THRU_INK_THICKNESS = 2        # thickness for the see-through ('Ink-Thru') layer
THRU_INK_COLOR = (20,100,50)  # color for the see-through ink layer
                              # NOTE(review): components exceed 1.0, unlike
                              # the 0..1 colors used elsewhere — confirm units.
|
||||
|
||||
|
||||
|
||||
# TODO: probably should have a dialog somewhere that can change these through the UI?
|
||||
|
||||
class LunaticsShot(object):
|
||||
"""
|
||||
General class for Lunatics Blender Scene data.
|
||||
"""
|
||||
# Display colors used to visually group compositor nodes by function
# (assigned to node.color with use_custom_color; values look like
# RGB in 0..1).
colorcode = {
    'paint': (1.00, 1.00, 1.00),
    'ink': (0.75, 0.50, 0.35),
    'thru': (0.35, 0.50, 0.75),
    'bb': (0.35, 0.75, 0.50),
    'bbthru': (0.35, 0.75, 0.75),
    'sky': (0.50, 0.25, 0.75),
    'compos': (0.75, 0.75, 0.75),
    'output': (0.35, 0.35, 0.35)
    }
|
||||
|
||||
def __init__(self, scene, inkthru=False, billboards=False, sepsky=False):
|
||||
self.scene = scene
|
||||
self.inkthru = bool(inkthru)
|
||||
self.billboards = bool(billboards)
|
||||
self.sepsky = bool(sepsky)
|
||||
|
||||
self.series_id = scene.lunaprops.series_id
|
||||
self.episode_id = scene.lunaprops.episode_id
|
||||
self.seq_id = scene.lunaprops.seq_id
|
||||
self.block_id = scene.lunaprops.block_id
|
||||
self.shot_id = scene.lunaprops.shot_id
|
||||
self.cam_id = scene.lunaprops.cam_id
|
||||
self.shot_name = scene.lunaprops.shot_name
|
||||
|
||||
self.render_root = '//../../Renders/'
|
||||
|
||||
@property
|
||||
def fullname(self):
|
||||
return self.designation + '-' + self.name
|
||||
|
||||
@property
|
||||
def designation(self):
|
||||
episode_code = "%2.2sE%2.2d" % (self.series_id, self.episode_id)
|
||||
return episode_code + '-' + self.shortname
|
||||
|
||||
@property
|
||||
def shortname(self):
|
||||
desig = str(self.seq_id) + '-' + str(self.block_id)
|
||||
if self.cam_id:
|
||||
desig = desig + '-Cam' + str(self.cam_id)
|
||||
if self.shot_id:
|
||||
desig = desig + '-' + str(self.shot_id)
|
||||
return desig
|
||||
|
||||
@property
|
||||
def scene_name(self):
|
||||
if self.shot_name:
|
||||
return self.shortname + ' ' + self.shot_name
|
||||
else:
|
||||
return self.shortname
|
||||
|
||||
def render_path(self, suffix='', framedigits=5, ext='png', rdr_fmt='PNG'):
|
||||
if suffix:
|
||||
suffix = '-' + suffix
|
||||
if rdr_fmt in ('AVI', 'MKV'):
|
||||
path = os.path.join(self.render_root, suffix,
|
||||
self.designation + suffix + '.' + ext)
|
||||
else:
|
||||
path = os.path.join(self.render_root, suffix, self.designation,
|
||||
self.designation + suffix + '-f' + '#'*framedigits + '.' + ext)
|
||||
return path
|
||||
|
||||
def cfg_scene(self, scene=None, thru=True, exr=True, multicam=False, role='shot'):
    """
    Configure a scene for the standard Lunatics ink/paint rendering setup:
    naming, output settings, render layers, and the compositor node tree.

    @scene: scene to configure; defaults to this shot's own scene.
    NOTE(review): thru, exr, multicam and role are accepted but never
    used in this body — confirm whether they are planned or vestigial.
    """
    if not scene:
        scene = self.scene

    # Scene naming and preview output settings.
    scene.name = self.scene_name
    scene.render.filepath = self.render_path()
    #os.path.join(self.render_root, 'PNG', self.designation, self.designation + '-f#####.png')
    scene.render.image_settings.file_format='PNG'
    scene.render.image_settings.compression = 50
    scene.render.image_settings.color_mode = 'RGB'
    scene.render.use_freestyle = True

    # Create Paint & Ink Render Layers
    for rlayer in scene.render.layers:
        rlayer.name = '~' + rlayer.name
        rlayer.use = False
        # Rename & turn off existing layers (but don't delete, in case they were wanted)

    scene.render.layers.new('Paint')
    self.cfg_paint(scene.render.layers['Paint'])

    scene.render.layers.new('Ink')
    self.cfg_ink(scene.render.layers['Ink'],
        thickness=INK_THICKNESS, color=INK_COLOR)

    # Optional second ink layer for lines seen through other objects.
    if self.inkthru:
        scene.render.layers.new('Ink-Thru')
        self.cfg_ink(scene.render.layers['Ink-Thru'],
            thickness=THRU_INK_THICKNESS, color=THRU_INK_COLOR)

    # Optional billboard alpha/material layers.
    if self.billboards:
        scene.render.layers.new('BB-Alpha')
        self.cfg_bbalpha(scene.render.layers['BB-Alpha'])

        scene.render.layers.new('BB-Mat')
        self.cfg_bbmat(scene.render.layers['BB-Mat'], thru=False)

    if self.billboards and self.inkthru:
        scene.render.layers.new('BB-Mat-Thru')
        self.cfg_bbmat(scene.render.layers['BB-Mat-Thru'], thru=True)

    # Optional separate sky layer.
    if self.sepsky:
        scene.render.layers.new('Sky')
        self.cfg_sky(scene.render.layers['Sky'])

    # Finally, wire up the compositor node tree for these layers.
    self.cfg_nodes(scene)
|
||||
|
||||
def _new_rlayer_in(self, name, scene, rlayer, location, color):
|
||||
tree = scene.node_tree
|
||||
rlayer_in = tree.nodes.new('CompositorNodeRLayers')
|
||||
rlayer_in.name = '_'.join([n.lower() for n in name.split('-')])+'_in'
|
||||
rlayer_in.label = name+'-In'
|
||||
rlayer_in.scene = scene
|
||||
rlayer_in.layer = rlayer
|
||||
rlayer_in.color = color
|
||||
rlayer_in.use_custom_color = True
|
||||
rlayer_in.location = location
|
||||
return rlayer_in
|
||||
|
||||
def cfg_nodes(self, scene):
    """
    Build the standard Lunatics ink/paint compositing node tree for *scene*.

    Creates render-layer input nodes, two multilayer EXR file outputs
    (paint passes and ink layers), and a preview compositing chain that
    feeds the Composite node. Optional sections are added according to
    the inkthru / billboards / sepsky flags set on this shot.
    """
    # Create Compositing Node Tree
    scene.use_nodes = True
    tree = scene.node_tree
    # clear default nodes
    for node in tree.nodes:
        tree.nodes.remove(node)

    # Paint RenderLayer Nodes
    paint_in = self._new_rlayer_in('Paint', scene, 'Paint',
        (0,1720), self.colorcode['paint'])

    if self.sepsky:
        sky_in = self._new_rlayer_in('Sky', scene, 'Sky',
            (0, 1200), self.colorcode['sky'])

    # Configure EXR format
    exr_paint = tree.nodes.new('CompositorNodeOutputFile')
    exr_paint.name = 'exr_paint'
    exr_paint.label = 'Paint EXR'
    exr_paint.location = (300,1215)
    exr_paint.color = self.colorcode['paint']
    exr_paint.use_custom_color = True
    exr_paint.format.file_format = 'OPEN_EXR_MULTILAYER'
    exr_paint.format.color_mode = 'RGBA'
    exr_paint.format.color_depth = '16'
    exr_paint.format.exr_codec = 'ZIP'
    exr_paint.base_path = os.path.join(self.render_root, 'EXR',
        self.designation, self.designation + '-Paint-f#####' + '.exr')
    # Drop the default 'Image' slot; explicit slots are created below.
    if 'Image' in exr_paint.layer_slots:
        exr_paint.layer_slots.remove(exr_paint.inputs['Image'])

    # Create EXR layers and connect to render passes
    rpasses = ['Image', 'Depth', 'Normal', 'Vector',
        'Spec', 'Shadow','Reflect','Emit']
    for rpass in rpasses:
        exr_paint.layer_slots.new(rpass)
        tree.links.new(paint_in.outputs[rpass], exr_paint.inputs[rpass])

    if self.sepsky:
        exr_paint.layer_slots.new('Sky')
        tree.links.new(sky_in.outputs['Image'], exr_paint.inputs['Sky'])

    # Ink RenderLayer Nodes
    ink_in = self._new_rlayer_in('Ink', scene, 'Ink',
        (590, 1275), self.colorcode['ink'])

    if self.inkthru:
        thru_in = self._new_rlayer_in('Thru', scene, 'Ink-Thru',
            (590, 990), self.colorcode['thru'])

    if self.billboards:
        bb_in = self._new_rlayer_in('BB', scene, 'BB-Alpha',
            (0, 870), self.colorcode['bb'])

        bb_mat = self._new_rlayer_in('BB-Mat', scene, 'BB-Mat',
            (0, 590), self.colorcode['bb'])

    if self.inkthru and self.billboards:
        bb_mat_thru = self._new_rlayer_in('BB-Mat-Thru', scene, 'BB-Mat-Thru',
            (0, 280), self.colorcode['bbthru'])

    # Ink EXR
    exr_ink = tree.nodes.new('CompositorNodeOutputFile')
    exr_ink.name = 'exr_ink'
    exr_ink.label = 'Ink EXR'
    exr_ink.location = (1150,700)
    exr_ink.color = self.colorcode['ink']
    exr_ink.use_custom_color = True
    exr_ink.format.file_format = 'OPEN_EXR_MULTILAYER'
    exr_ink.format.color_mode = 'RGBA'
    exr_ink.format.color_depth = '16'
    exr_ink.format.exr_codec = 'ZIP'
    exr_ink.base_path = os.path.join(self.render_root, 'EXR',
        self.designation, self.designation + '-Ink-f#####' + '.exr')

    # Create EXR Ink layers and connect
    if 'Image' in exr_ink.layer_slots:
        exr_ink.layer_slots.remove(exr_ink.inputs['Image'])
    exr_ink.layer_slots.new('Ink')
    tree.links.new(ink_in.outputs['Image'], exr_ink.inputs['Ink'])

    if self.inkthru:
        exr_ink.layer_slots.new('Ink-Thru')
        tree.links.new(thru_in.outputs['Image'], exr_ink.inputs['Ink-Thru'])

    if self.billboards:
        exr_ink.layer_slots.new('BB-Alpha')
        tree.links.new(bb_in.outputs['Alpha'], exr_ink.inputs['BB-Alpha'])

        exr_ink.layer_slots.new('BB-Mat')
        tree.links.new(bb_mat.outputs['IndexMA'], exr_ink.inputs['BB-Mat'])

    if self.inkthru and self.billboards:
        exr_ink.layer_slots.new('BB-Mat-Thru')
        tree.links.new(bb_mat_thru.outputs['IndexMA'], exr_ink.inputs['BB-Mat-Thru'])


    # Preview Compositing
    # Shadow is multiplied over the paint image at 60% strength.
    mix_shadow = tree.nodes.new('CompositorNodeMixRGB')
    mix_shadow.name = 'mix_shadow'
    mix_shadow.label = 'Mix-Shadow'
    mix_shadow.location = (510,1820)
    mix_shadow.color = self.colorcode['compos']
    mix_shadow.use_custom_color = True
    mix_shadow.blend_type = 'MULTIPLY'
    mix_shadow.inputs['Fac'].default_value = 0.6
    mix_shadow.use_clamp = True
    tree.links.new(paint_in.outputs['Image'], mix_shadow.inputs[1])
    tree.links.new(paint_in.outputs['Shadow'], mix_shadow.inputs[2])

    # Reflection and emission passes are added back on top.
    mix_reflect = tree.nodes.new('CompositorNodeMixRGB')
    mix_reflect.name = 'mix_reflect'
    mix_reflect.label = 'Mix-Reflect'
    mix_reflect.location = (910, 1620)
    mix_reflect.color = self.colorcode['compos']
    mix_reflect.use_custom_color = True
    mix_reflect.blend_type = 'ADD'
    mix_reflect.inputs['Fac'].default_value = 1.1
    mix_reflect.use_clamp = True
    tree.links.new(paint_in.outputs['Reflect'], mix_reflect.inputs[2])

    mix_emit = tree.nodes.new('CompositorNodeMixRGB')
    mix_emit.name = 'mix_emit'
    mix_emit.label = 'Mix-Emit'
    mix_emit.location = (1110, 1520)
    mix_emit.blend_type = 'ADD'
    mix_emit.inputs['Fac'].default_value = 1.1
    mix_emit.use_clamp = True
    tree.links.new(mix_reflect.outputs['Image'], mix_emit.inputs[1])
    tree.links.new(paint_in.outputs['Emit'], mix_emit.inputs[2])

    # Separate sky: composite paint over sky using the paint alpha.
    if self.sepsky:
        sky_mix = tree.nodes.new('CompositorNodeMixRGB')
        sky_mix.name = 'sky_mix'
        sky_mix.label = 'Sky Mix'
        sky_mix.location = (710,1720)
        sky_mix.color = self.colorcode['sky']
        sky_mix.use_custom_color = True
        sky_mix.blend_type = 'MIX'
        sky_mix.use_clamp = True
        tree.links.new(sky_in.outputs['Image'], sky_mix.inputs[1])
        tree.links.new(paint_in.outputs['Alpha'], sky_mix.inputs['Fac'])
        tree.links.new(mix_shadow.outputs['Image'], sky_mix.inputs[2])
        tree.links.new(sky_mix.outputs['Image'], mix_reflect.inputs[1])
    else:
        tree.links.new(mix_shadow.outputs['Image'], mix_reflect.inputs[1])

    # Billboards: mask ink where billboard material (index 1) is visible.
    if self.billboards:
        mat_idx = tree.nodes.new('CompositorNodeIDMask')
        mat_idx.name = "mat_idx"
        mat_idx.label = "BB-ID"
        mat_idx.location = (260, 670)
        mat_idx.index = 1
        mat_idx.use_antialiasing = True
        mat_idx.color = self.colorcode['bb']
        mat_idx.use_custom_color = True
        tree.links.new(bb_mat.outputs['IndexMA'], mat_idx.inputs['ID value'])

        combine_bb_ma = tree.nodes.new('CompositorNodeMath')
        combine_bb_ma.name = 'combine_bb_ma'
        combine_bb_ma.label = 'Material x BB'
        combine_bb_ma.location = (440,670)
        combine_bb_ma.color = self.colorcode['bb']
        combine_bb_ma.use_custom_color = True
        combine_bb_ma.operation = 'MULTIPLY'
        combine_bb_ma.use_clamp = True
        tree.links.new(mat_idx.outputs['Alpha'], combine_bb_ma.inputs[0])
        tree.links.new(bb_in.outputs['Alpha'], combine_bb_ma.inputs[1])

        invert_bb_mask = tree.nodes.new('CompositorNodeInvert')
        invert_bb_mask.name = 'invert_bb_mask'
        invert_bb_mask.label = 'Invert Mask'
        invert_bb_mask.location = (650,670)
        invert_bb_mask.color = self.colorcode['bb']
        invert_bb_mask.use_custom_color = True
        invert_bb_mask.invert_rgb = True
        tree.links.new(combine_bb_ma.outputs['Value'], invert_bb_mask.inputs['Color'])

        bb_ink_mask = tree.nodes.new('CompositorNodeMath')
        bb_ink_mask.name = 'bb_ink_mask'
        bb_ink_mask.label = 'BB Ink Mask'
        bb_ink_mask.location = (1150,1315)
        bb_ink_mask.color = self.colorcode['bb']
        bb_ink_mask.use_custom_color = True
        bb_ink_mask.operation = 'MULTIPLY'
        bb_ink_mask.use_clamp = True
        tree.links.new(invert_bb_mask.outputs['Color'], bb_ink_mask.inputs[0])

    # Soften the ink lines slightly before overlaying on paint.
    blur_ink = tree.nodes.new('CompositorNodeBlur')
    blur_ink.name = 'blur_ink'
    blur_ink.label = 'Blur-Ink'
    blur_ink.location = (1620, 1110)
    blur_ink.color = self.colorcode['ink']
    blur_ink.use_custom_color = True
    blur_ink.filter_type = 'FAST_GAUSS'
    blur_ink.size_x = 1.0
    blur_ink.size_y = 1.0
    blur_ink.use_extended_bounds = False
    blur_ink.inputs['Size'].default_value = 1.0

    # Merge the see-through ink layer under the main ink, if present.
    if self.inkthru:
        merge_ink_ao = tree.nodes.new('CompositorNodeAlphaOver')
        merge_ink_ao.name = 'merge_ink'
        merge_ink_ao.label = 'Merge-Ink'
        merge_ink_ao.location = (1150,910)
        merge_ink_ao.color = self.colorcode['thru']
        merge_ink_ao.use_custom_color = True
        merge_ink_ao.use_premultiply = False
        merge_ink_ao.premul = 0.0
        merge_ink_ao.inputs['Fac'].default_value = 1.0
        tree.links.new(ink_in.outputs['Image'], merge_ink_ao.inputs[1])
        tree.links.new(thru_in.outputs['Image'], merge_ink_ao.inputs[2])
        tree.links.new(merge_ink_ao.outputs['Image'], blur_ink.inputs['Image'])
    else:
        tree.links.new(ink_in.outputs['Image'], blur_ink.inputs['Image'])

    # Overlay blurred ink on the composited paint image.
    overlay_ink = tree.nodes.new('CompositorNodeAlphaOver')
    overlay_ink.name = 'Overlay Ink'
    overlay_ink.label = 'Overlay Ink'
    overlay_ink.location = (1820,1315)
    overlay_ink.color = self.colorcode['compos']
    overlay_ink.use_custom_color = True
    overlay_ink.use_premultiply = False
    overlay_ink.premul = 0.0
    overlay_ink.inputs['Fac'].default_value = 1.0
    tree.links.new(mix_emit.outputs['Image'], overlay_ink.inputs[1])
    tree.links.new(blur_ink.outputs['Image'], overlay_ink.inputs[2])

    if self.billboards:
        tree.links.new(ink_in.outputs['Alpha'], bb_ink_mask.inputs[1])
        tree.links.new(bb_ink_mask.outputs['Value'], overlay_ink.inputs['Fac'])

    # Combined billboards + ink-thru: mask the thru ink the same way,
    # then merge both masks into the overlay factor.
    if self.inkthru and self.billboards:
        mat_idx_thru = tree.nodes.new('CompositorNodeIDMask')
        mat_idx_thru.name = "mat_idx_thru"
        mat_idx_thru.label = "BB-ID-Thru"
        mat_idx_thru.location = (260, 425)
        mat_idx_thru.index = 1
        mat_idx_thru.use_antialiasing = True
        mat_idx_thru.color = self.colorcode['bbthru']
        mat_idx_thru.use_custom_color = True
        tree.links.new(bb_mat_thru.outputs['IndexMA'], mat_idx_thru.inputs['ID value'])

        combine_bbthru_ma = tree.nodes.new('CompositorNodeMath')
        combine_bbthru_ma.name = 'combine_bbthru_ma'
        combine_bbthru_ma.label = 'Material x BB-Thru'
        combine_bbthru_ma.location = (440,425)
        combine_bbthru_ma.color = self.colorcode['bbthru']
        combine_bbthru_ma.use_custom_color = True
        combine_bbthru_ma.operation = 'MULTIPLY'
        combine_bbthru_ma.use_clamp = True
        tree.links.new(mat_idx_thru.outputs['Alpha'], combine_bbthru_ma.inputs[0])
        tree.links.new(bb_in.outputs['Alpha'], combine_bbthru_ma.inputs[1])

        invert_bbthru_mask = tree.nodes.new('CompositorNodeInvert')
        invert_bbthru_mask.name = 'invert_bbthru_mask'
        invert_bbthru_mask.label = 'Invert Mask'
        invert_bbthru_mask.location = (650,425)
        invert_bbthru_mask.color = self.colorcode['bbthru']
        invert_bbthru_mask.use_custom_color = True
        invert_bbthru_mask.invert_rgb = True
        tree.links.new(combine_bbthru_ma.outputs['Value'], invert_bbthru_mask.inputs['Color'])

        bb_thru_mask = tree.nodes.new('CompositorNodeMath')
        bb_thru_mask.name = 'bb_thru_mask'
        bb_thru_mask.label = 'BB Ink Thru Mask'
        bb_thru_mask.location = (1150,1115)
        bb_thru_mask.color = self.colorcode['bbthru']
        bb_thru_mask.use_custom_color = True
        bb_thru_mask.operation = 'MULTIPLY'
        bb_thru_mask.use_clamp = True
        tree.links.new(thru_in.outputs['Alpha'], bb_thru_mask.inputs[0])
        tree.links.new(invert_bbthru_mask.outputs['Color'], bb_thru_mask.inputs[1])

        merge_bb_ink_masks = tree.nodes.new('CompositorNodeMath')
        merge_bb_ink_masks.name = 'merge_bb_ink_masks'
        merge_bb_ink_masks.label = 'Merge BB Ink Masks'
        merge_bb_ink_masks.location = (1415, 1215)
        merge_bb_ink_masks.color = self.colorcode['bbthru']
        merge_bb_ink_masks.use_custom_color = True
        merge_bb_ink_masks.operation = 'ADD'
        merge_bb_ink_masks.use_clamp = True
        tree.links.new(bb_ink_mask.outputs['Value'], merge_bb_ink_masks.inputs[0])
        tree.links.new(bb_thru_mask.outputs['Value'], merge_bb_ink_masks.inputs[1])

        tree.links.new(merge_bb_ink_masks.outputs['Value'], overlay_ink.inputs['Fac'])

    # Final preview output node.
    composite = tree.nodes.new('CompositorNodeComposite')
    composite.name = 'Composite'
    composite.label = 'Preview Render'
    composite.location = (2050,1215)
    composite.color = self.colorcode['output']
    composite.use_custom_color = True
    composite.use_alpha = True
    composite.inputs['Alpha'].default_value = 1.0
    composite.inputs['Z'].default_value = 1.0
    tree.links.new(overlay_ink.outputs['Image'], composite.inputs['Image'])
|
||||
|
||||
def _cfg_renderlayer(self, rlayer,
|
||||
includes=False, passes=False, excludes=False,
|
||||
layers=range(20)):
|
||||
# Utility to set all the includes and passes on or off, initially
|
||||
|
||||
# Weird Includes (we never use these -- always have to turn these on explicitly)
|
||||
rlayer.use_zmask = False
|
||||
rlayer.invert_zmask = False
|
||||
rlayer.use_all_z = False
|
||||
|
||||
# Includes
|
||||
rlayer.use_solid = includes
|
||||
rlayer.use_halo = includes
|
||||
rlayer.use_ztransp = includes
|
||||
rlayer.use_sky = includes
|
||||
rlayer.use_edge_enhance = includes
|
||||
rlayer.use_strand = includes
|
||||
rlayer.use_freestyle = includes
|
||||
|
||||
# Passes
|
||||
rlayer.use_pass_combined = passes
|
||||
rlayer.use_pass_z = passes
|
||||
rlayer.use_pass_vector = passes
|
||||
rlayer.use_pass_normal = passes
|
||||
|
||||
rlayer.use_pass_uv = passes
|
||||
rlayer.use_pass_mist = passes
|
||||
rlayer.use_pass_object_index = passes
|
||||
rlayer.use_pass_material_index = passes
|
||||
rlayer.use_pass_color = passes
|
||||
|
||||
rlayer.use_pass_diffuse = passes
|
||||
rlayer.use_pass_specular = passes
|
||||
rlayer.use_pass_shadow = passes
|
||||
rlayer.use_pass_emit = passes
|
||||
|
||||
rlayer.use_pass_ambient_occlusion = passes
|
||||
rlayer.use_pass_environment = passes
|
||||
rlayer.use_pass_indirect = passes
|
||||
|
||||
rlayer.use_pass_reflection = passes
|
||||
rlayer.use_pass_refraction = passes
|
||||
|
||||
# Exclusions
|
||||
rlayer.exclude_specular = excludes
|
||||
rlayer.exclude_shadow = excludes
|
||||
rlayer.exclude_emit = excludes
|
||||
rlayer.exclude_ambient_occlusion = excludes
|
||||
rlayer.exclude_environment = excludes
|
||||
rlayer.exclude_indirect = excludes
|
||||
rlayer.exclude_reflection = excludes
|
||||
rlayer.exclude_refraction = excludes
|
||||
|
||||
for i in range(20):
|
||||
if i in layers:
|
||||
rlayer.layers[i] = True
|
||||
else:
|
||||
rlayer.layers[i] = False
|
||||
|
||||
|
||||
def cfg_paint(self, paint_layer, name="Paint"):
    """
    Configure the 'Paint' (color) render layer.

    Starts from the baseline set by _cfg_renderlayer with all includes
    on, then adjusts the sky/freestyle includes and enables the data
    passes needed for ink/paint compositing.
    """
    self._cfg_renderlayer(
        paint_layer,
        includes=True, passes=False, excludes=False,
        layers=(0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14))

    # Includes: when the separate-sky option is active the sky renders
    # on its own layer, and ink lines never belong on the paint layer.
    if self.sepsky:
        paint_layer.use_sky = False
    paint_layer.use_freestyle = False

    # Data passes used downstream in compositing.
    paint_layer.use_pass_combined = True
    paint_layer.use_pass_z = True
    paint_layer.use_pass_vector = True
    paint_layer.use_pass_normal = True

    # Pass-and-exclude pairs: each component is captured as a separate
    # pass while being removed from the combined result.
    for component in ('shadow', 'emit', 'specular', 'reflection'):
        setattr(paint_layer, 'use_pass_' + component, True)
        setattr(paint_layer, 'exclude_' + component, True)
def cfg_bbalpha(self, bb_render_layer):
    """
    Configure the billboard-alpha render layer.

    Baseline-resets the layer to scene layers 5, 6, and 14, then turns
    on only solid and Z-transparent geometry with the combined pass.
    """
    self._cfg_renderlayer(
        bb_render_layer,
        includes=False, passes=False, excludes=False,
        layers=(5, 6, 14))
    # Includes (solid + Z-transparency) and the single combined pass.
    for attr in ('use_solid', 'use_ztransp', 'use_pass_combined'):
        setattr(bb_render_layer, attr, True)
def cfg_bbmat(self, bb_mat_layer, thru=False):
    """
    Configure a billboard material-index render layer.

    When *thru* is False (the normal ink layer), scene layer 4 (the
    transparency layer) is additionally enabled; the 'thru' variant
    leaves it off.
    """
    self._cfg_renderlayer(
        bb_mat_layer,
        includes=False, passes=False, excludes=False,
        layers=(0, 1, 2, 3, 5, 6, 7, 10, 11, 12, 13, 14, 15, 16))

    # Includes and the passes needed to build the material-index mask.
    for attr in ('use_solid', 'use_ztransp',
                 'use_pass_combined', 'use_pass_material_index'):
        setattr(bb_mat_layer, attr, True)

    if not thru:
        bb_mat_layer.layers[4] = True
def cfg_sky(self, sky_render_layer):
    """
    Configure the separate sky render layer.

    Baseline-resets the layer, then renders only the sky background
    with the combined pass for accurate compositing.
    """
    self._cfg_renderlayer(
        sky_render_layer,
        includes=False, passes=False, excludes=False,
        layers=(0, 1, 2, 3, 4, 5, 6, 7, 10, 11, 12, 13, 14))
    # Only the sky include and the combined pass are wanted here.
    for attr in ('use_sky', 'use_pass_combined'):
        setattr(sky_render_layer, attr, True)
def cfg_ink(self, ink_layer, name="Ink", thickness=3, color=(0, 0, 0)):
    """
    Configure a Freestyle ink render layer.

    Baseline-resets the layer, enables Freestyle with the combined
    pass, sets the Freestyle detection parameters, and ensures a
    lineset named *name* exists and is configured via cfg_lineset.
    """
    self._cfg_renderlayer(
        ink_layer,
        includes=False, passes=False, excludes=False,
        layers=(0, 1, 2, 3, 5, 6, 7, 10, 11, 12, 13, 15, 16))

    # Only Freestyle ink is rendered on this layer.
    ink_layer.use_freestyle = True
    ink_layer.use_pass_combined = True

    # Freestyle line-detection settings.
    fs = ink_layer.freestyle_settings
    fs.crease_angle = 2.617944   # radians (~150 degrees)
    fs.use_smoothness = True
    fs.use_culling = True

    # Reuse the first lineset if one already exists; otherwise create it.
    if len(fs.linesets) > 0:
        fs.linesets[0].name = name
    else:
        fs.linesets.new(name)

    self.cfg_lineset(fs.linesets[name], thickness, color)

    # The regular ink layer also renders the transparency layer (4);
    # the 'Ink-Thru' workaround layer must leave it off.
    if ink_layer.name != 'Ink-Thru':
        ink_layer.layers[4] = True
def cfg_lineset(self, lineset, thickness=3, color=(0, 0, 0)):
    """
    Configure a Freestyle lineset for the standard ink look.

    Selects visible edges of the usual ink edge types, optionally
    excludes the 'No Freestyle' group, and attaches the shared 'Ink'
    linestyle (creating and configuring it on first use).
    """
    #lineset.name = 'NormalInk'
    # Selection options: visible edges of the marked types, clipped at
    # the image border; face marks are not used.
    lineset.select_by_visibility = True
    lineset.select_by_edge_types = True
    lineset.select_by_image_border = True
    lineset.select_by_face_marks = False
    lineset.select_by_group = True

    # Visibility option
    lineset.visibility = 'VISIBLE'

    # Edge types: inclusive OR of the standard ink edge types.
    lineset.edge_type_negation = 'INCLUSIVE'
    lineset.edge_type_combination = 'OR'
    for edge_type in ('silhouette', 'border', 'contour', 'crease',
                      'edge_mark', 'external_contour'):
        setattr(lineset, 'select_' + edge_type, True)

    # Objects in the 'No Freestyle' group (if it exists) are excluded
    # from ink line generation.
    if 'No Freestyle' in bpy.data.groups:
        lineset.select_by_group = True
        lineset.group = bpy.data.groups['No Freestyle']
        lineset.group_negation = 'EXCLUSIVE'
    else:
        lineset.select_by_group = False

    # Share a single 'Ink' linestyle across linesets, creating and
    # configuring it on first use.
    if 'Ink' in bpy.data.linestyles:
        lineset.linestyle = bpy.data.linestyles['Ink']
    else:
        lineset.linestyle.name = 'Ink'
        self.cfg_linestyle(lineset.linestyle, thickness, color)
def cfg_linestyle(self, linestyle, thickness=INK_THICKNESS, color=INK_COLOR):
    """
    Configure the shared 'Ink' Freestyle linestyle.

    Only *thickness* and *color* are adjustable; everything else is the
    fixed house style for "Lunatics!", including an along-stroke
    'taper' thickness modifier.
    """
    # The only changeable parameters:
    linestyle.color = color
    linestyle.thickness = thickness

    # Fixed house style for "Lunatics!".
    linestyle.alpha = 1.0
    linestyle.thickness_position = 'CENTER'
    linestyle.use_chaining = True
    linestyle.chaining = 'PLAIN'
    linestyle.use_same_object = True
    linestyle.caps = 'ROUND'

    # ADD THE ALONG-STROKE MODIFIER CURVE
    # TODO: try using the .new(type=...) idiom to see if it works?
    # This probably needs the scene context set?
    # bpy.ops.scene.freestyle_thickness_modifier_add(type='ALONG_STROKE')
    linestyle.thickness_modifiers.new(type='ALONG_STROKE', name='taper')
    taper = linestyle.thickness_modifiers['taper']
    taper.blend = 'MULTIPLY'
    taper.mapping = 'CURVE'

    # These are defaults, so maybe unnecessary?
    taper.influence = 1.0
    taper.invert = False
    taper.value_min = 0.0
    taper.value_max = 1.0

    # The curve API only lets us move the two built-in points (which
    # can't be removed) and append a third; update() picks up the
    # changes afterwards.
    points = taper.curve.curves[0].points
    points[0].location = (0.0, 0.0)
    points[1].location = (0.5, 1.0)
    points.new(1.0, 0.0)
    taper.curve.update()
|
@ -0,0 +1,205 @@
|
|||
Design Notes
|
||||
============
|
||||
|
||||
**ABX** or "Anansi Blender Extensions" is a catch-all Blender plugin to hold
|
||||
current, custom, and experimental Blender extensions we use in Anansi
|
||||
Spaceworks Studio projects. As we mature projects, we may choose to move
|
||||
some of them into a more stable package or packages for wider distribution.
|
||||
|
||||
This file accumulates some design notes for additional projects to incorporate
|
||||
into ABX, from my daily worklog notes.
|
||||
|
||||
Copy Animation
|
||||
--------------
|
||||
|
||||
Options:
|
||||
|
||||
* Copy Active Action (Dopesheet animation)
|
||||
* Copy All NLA Actions
|
||||
* Apply a scale factor and copy animations instead of linking
|
||||
* Floating-point scale factor
|
||||
|
||||
This was my first goal with ABX. Blender provides no simple way to copy
|
||||
ALL of the animation from one object to another. This makes it very awkward
|
||||
to refactor or repair broken animation rig proxies -- a problem that can
|
||||
easily happen on a large project if things get renamed or files get moved.
|
||||
|
||||
Sometimes it's necessary to just create a new proxy from a character and
|
||||
transfer the animation to it. "Copy Animation" allows that.
|
||||
|
||||
With the new rescaling feature (added April 2021), it also allows us to fix
|
||||
scaling errors. For example, when we set up the "TR-Train" sequence in
|
||||
"Lunatics!" S1E01, the file's scale was set up wrong -- the rest of the project
|
||||
is in meter scale. But it was very awkward to try to change anything. It may
|
||||
still be hard, but we should be able to apply scales using this tool.
|
||||
|
||||
References:
|
||||
|
||||
https://blender.stackexchange.com/questions/74183/how-can-i-copy-nla-tracks-from-one-armature-to-another
|
||||
https://www.reddit.com/r/blender/comments/eu3w6m/guide_how_to_scale_a_rigify_rig/
|
||||
|
||||
|
||||
|
||||
Change Armature Proxy Name
|
||||
--------------------------
|
||||
|
||||
An alternative approach would be to change the name of a proxy armature.
|
||||
|
||||
Seems to work, but not sure::
|
||||
>>> bpy.data.objects['Georgiana_Pinafore_proxy'].proxy.data.name = 'georgiana_pinafore-TEST'
|
||||
|
||||
I wonder if we can just fix the broken proxy case without having to copy?
|
||||
|
||||
|
||||
Ink/Paint Configuration
|
||||
-----------------------
|
||||
|
||||
The "Ink/Paint Config" operator allows us to set up standard shot files ink/paint
|
||||
compositing, including several tricks I've come up with for "Lunatics!" to handle
|
||||
transparency, billboard objects, and the sky background correctly with the ink/paint
|
||||
setup.
|
||||
|
||||
So far (April 2021), it only supports "shot rendering" ("cam") files. I should
|
||||
also provide support at least for setting up "shot compositing" ("compos") files,
|
||||
which would take their input from the EXR files I create in the rendering phase.
|
||||
|
||||
|
||||
Setup Compositing Files
|
||||
-----------------------
|
||||
|
||||
Also should be able to automate compositing and repacking setups
|
||||
- find ranges of numbered frame files (EXR or PNG streams, etc),
|
||||
and choose from them to set up ranges (maybe checkbox list?).
|
||||
|
||||
Command line script using Blender to repack EXR files from
|
||||
source EXR directory or glob to target directory or with new extension/prefix. Currently can do this one by one in Blender,
|
||||
or by manually setting up ranges (but images must exist).
|
||||
|
||||
Want to automatically detect what frames exist and render those.
|
||||
We can use this to repack bulky EXRs from 2.71 into a more
|
||||
compact format in 2.79.
|
||||
|
||||
|
||||
|
||||
Compare Armatures
|
||||
-----------------
|
||||
|
||||
Getting bone objects from an armature in a proxy::
|
||||
|
||||
bpy.data.objects['Georgiana_Pinafore_proxy'].proxy.data.bones[0]
|
||||
|
||||
Research how to walk the armature to find all the bone names (and check one against the other).
|
||||
(Bones are not hierarchically organized in file. You have to trace the parent relationships
|
||||
and construct the hierarchy)::
|
||||
|
||||
[(root_A, [(A1, []), (A2, []), (A3, [(A3a, []), (A3b, [])])]), (root_B, [])]
|
||||
|
||||
Then print indented & ordered::
|
||||
|
||||
root_A
|
||||
A1
|
||||
A2
|
||||
A3
|
||||
A3a
|
||||
A3b
|
||||
root_B
|
||||
|
||||
or as paths::
|
||||
|
||||
root_A
|
||||
root_A/A1
|
||||
root_A/A2
|
||||
root_A/A3
|
||||
root_A/A3/A3a
|
||||
root_A/A3/A3b
|
||||
root_B
|
||||
|
||||
Find "missing" bones -- in src, but not tgt
|
||||
Find "extra" bones -- in tgt, but not src
|
||||
|
||||
|
||||
Link Character into Scene
|
||||
-------------------------
|
||||
|
||||
How to add a character along with proxy for animation. Can we do this with tagging on character libraries?
|
||||
|
||||
Could add render profile & automatic naming tool here to set up correct
|
||||
rendering for project.
|
||||
|
||||
Initialize Files
|
||||
----------------
|
||||
|
||||
Wizard to create basic types of files we need:
|
||||
|
||||
- char
|
||||
- set
|
||||
- prop
|
||||
- anim
|
||||
- extra
|
||||
- mech
|
||||
- cam
|
||||
- compos
|
||||
|
||||
|
||||
Freestyle Camera-Clipping Configuration
|
||||
---------------------------------------
|
||||
|
||||
Create a companion scene from the current scene, with a Freestyle camera to
|
||||
be used to generate ink lines, but with a shorter camera clipping range.
|
||||
|
||||
fs_scene_name = bpy.context.scene.name + '-FS'
|
||||
CamOb = bpy.context.scene.camera
|
||||
FsCamOb = bpy.context.scene.camera.copy()
|
||||
FsCamOb.name = CamOb.name + '-FS'
|
||||
|
||||
|
||||
NewScene = bpy.data.scenes.new(name=bpy.context.scene.name + '-FS')
|
||||
(Equivalent to bpy.ops.scene.new(type="NEW"), does not copy settings)
|
||||
|
||||
NewScene = bpy.ops.scene.new(type="EMPTY")
|
||||
(Better. Copies settings. But does not allow setting the name. Named by the current scene
|
||||
plus .001 -- probably will be .002 if there is already a .001)
|
||||
NewScene = bpy.data.scenes[OldScene.name + '.001']
|
||||
|
||||
NewScene.name = OldScene.name + '-FS'
|
||||
|
||||
No settings!
|
||||
|
||||
Instead:
|
||||
bpy.ops.scene.new(type="LINK_OBJECTS")
|
||||
NewScene = bpy.context.scene # Because ops updates the context
|
||||
# OR
|
||||
NewScene = bpy.data.scenes[OldScene.name + '.001']
|
||||
# IF that name wasn't used
|
||||
|
||||
NewScene.name = OldScene.name + '-FS'
|
||||
|
||||
for ob in OldScene.objects:
|
||||
if ob != OldScene.camera:
|
||||
NewScene.objects.link(ob)
|
||||
|
||||
NewScene.objects.link(FsCamOb)
|
||||
FsCamOb.data = FsCamOb.data.copy()
|
||||
FsCamOb.data.name = FsCamOb.data.name + '-FS'
|
||||
|
||||
NewScene.objects.unlink(OldScene.camera)
|
||||
|
||||
FsCamOb.data.clip_end = 10.0 # Just setting it to 10 meters
|
||||
|
||||
Had to fix my script to name the Color and Ink input renderlayer nodes
|
||||
(Now 'Color-In' and 'Ink-In'. Was just "Render Layer" and "Render Layer.001")
|
||||
|
||||
# Cross the streams!
|
||||
OldScene.node_tree.nodes['Ink-In'].scene = NewScene
|
||||
|
||||
NewScene.render.layers['Ink'].use = True
|
||||
if 'Ink-Thru' in NewScene.render.layers:
|
||||
NewScene.render.layers['Ink-Thru'].use = True
|
||||
NewScene.render.layers['Color'].use = False
|
||||
|
||||
OldScene.render.layers['Color'].use = True
|
||||
OldScene.render.layers['Ink'].use = False
|
||||
if 'Ink-Thru' in OldScene.render.layers:
|
||||
OldScene.render.layers['Ink-Thru'].use = False
|
||||
|
||||
|
|
@ -0,0 +1,138 @@
|
|||
I started ABX very informally, so I did not really have logical version numbers
|
||||
initially, and I'm not sure about the earliest packages I made.
|
||||
|
||||
In April 2021, I started trying to formally package with a consistent version
|
||||
number, with the following meanings:
|
||||
|
||||
|
||||
0.1.0 (<2021):
|
||||
Versions before 2021, with "Copy NLA" and "L.Ink Compos" and not much else.
|
||||
|
||||
There were some experimental version of context.py, file_context.py, and
|
||||
blender_context.py in the source, but they were unused and buggy or incomplete.
|
||||
|
||||
|
||||
0.2.0 (2021-02 to 2021-05):
|
||||
Versions from April/May 2021, with major design changes:
|
||||
|
||||
* "Copy NLA" became "Copy Animation", and acquired new options:
|
||||
- NLA (Old NLA behavior)
|
||||
- Dopesheet (copy the "active action", which appears in the dopesheet).
|
||||
- Copy/Rescale (instead of linking the animation, copy it, possibly rescaling)
|
||||
- Scale Factor (if other than 1.0, the animation is scaled when copied)
|
||||
|
||||
* "L.Ink Compos" became "Ink/Paint Config", and also got new options:
|
||||
- Ink-Thru (Freestyle ink workaround for transparency, can now be turned off)
|
||||
- Billboards (Accurate alpha-billboard ink masking technique)
|
||||
- Separate Sky (Generate sky on a separate render layer for accurate compositing)
|
||||
|
||||
* "Lunatics! Properties" got a new field for the scene "suffix" or title, used
|
||||
exclusively to give some extra information in the scene name. This was to
|
||||
accommodate short shot codes I was using, like "G.PoV" meaning "Georgiana
|
||||
point-of-view shot", to clarify the letter-codes for the shots, especially
|
||||
since I was changing some of them around.
|
||||
|
||||
|
||||
0.2.5 (2021-05):
|
||||
The last version before introducing the file_context data system into the main
|
||||
functions of the program. It contains fully-working and tested file_context.py
|
||||
and accumulate.py, but they haven't been updated with specific changes to
|
||||
accommodate the Blender UI yet, and the abx_ui.py module doesn't use them.
|
||||
|
||||
I made this version as a stable fallback for production with all of the recent
|
||||
bugfixes, but none of the new (and likely buggy) data system features.
|
||||
|
||||
Includes:
|
||||
|
||||
* render_profiles feature in the Render panel. This is a quick select for
|
||||
changing the render settings and output filepath to be named consistently
|
||||
and use the correct settings for the most common pre-visualization renders
|
||||
I make, and the full PNG stream render.
|
||||
- The profiles are hard-coded, because there's no contextual data yet.
|
||||
- EXR files may be generated from previz. I didn't make any attempt to
|
||||
stop this, because it's usually not relevant (they'll be clobbered by
|
||||
the full PNG renders, which are done last -- but this could be a problem
|
||||
if I tried to regenerate the previz later)
|
||||
|
||||
|
||||
|
||||
ROADMAP (for 2021?):
|
||||
|
||||
0.2.6 (2021-05):
|
||||
The first attempt to incorporate YAML configuration into the program.
|
||||
|
||||
* Introduce better fallback behavior when project YAML files aren't found,
|
||||
and give informative messages to the user about them.
|
||||
|
||||
* Move program defaults into abx.yaml and project.yaml files in the ABX
|
||||
source code. Load these before project files to guarantee default requirements
|
||||
are met.
|
||||
|
||||
* Use abx.yaml 'render_profiles' block to set options for render_profiles
|
||||
|
||||
0.2.7:
|
||||
Replace "Lunatics! Properties" with "Project Properties" (the project name will
|
||||
be one of the properties, and probably read-only). Parse the "role" field.
|
||||
Generate read-only data from file_context and appropriate data-type fields
|
||||
for unspecified ranks.
|
||||
|
||||
0.2.8:
|
||||
Fix the schema override issue to allow for use with library assets or other
|
||||
hierarchy besides episodes. Improve the handling of "rank" (possibly refactor
|
||||
the code to make rank-handling more intuitive?)
|
||||
|
||||
0.2.9:
|
||||
Write / save / update to filepath to allow changing the name based on the
|
||||
input from the Project Properties (Not 100% sure I want this).
|
||||
|
||||
0.3.0:
|
||||
0.3.0 should have full data support driving the program features.
|
||||
|
||||
0.3.1:
|
||||
Pop-up report generation? I had this working experimentally, years ago, and
|
||||
it'd be useful to generate pop-up reports for some of the introspection
|
||||
features I want to add.
|
||||
|
||||
0.3.x:
|
||||
Improvements to data system and other capabilities.
|
||||
|
||||
* Name context by group, rather than scene? Would be handy for library assets.
|
||||
|
||||
* Possible integration with KitCAT? There's significant overlap between ABX
|
||||
and KitCAT and I already call the metadata files "kitcat" data in the sources.
|
||||
Will they be merely interoperable, or should I actually make ABX into the KitCAT
|
||||
plugin for Blender?
|
||||
|
||||
* Port ABX to Blender 2.8+, keeping 2.7 support, if possible. If I'm really
|
||||
going to use 2.8 for compositing, I will need this.
|
||||
|
||||
0.4.x (hopefully during 2021, because these would be very useful to have!):
|
||||
New features. Hopefully:
|
||||
|
||||
* Recognize/use "role" field.
|
||||
* Ink/Paint Config for compositing (as opposed to rendering)
|
||||
* Appropriate config tools for anim, mech, etc -- other roles
|
||||
* Automate Freestyle "Ink-Camera" clipping system.
|
||||
* Character Armature comparison (find misnamed or extra bones)
|
||||
* Insert characters into an animation file
|
||||
* Find/report broken dupligroups and/or animation proxies
|
||||
* Generate linking report(s) -- information to support correct
|
||||
linking diagrams, or possibly source for auto-generating them?
|
||||
|
||||
0.5.x:
|
||||
Integration with Papagayo / Lipsync data for animation?
|
||||
Maybe find the appropriate data files and run the lipsync importer on them?
|
||||
Or perhaps incorporate the lipsync importer code?
|
||||
|
||||
|
||||
0.6.x:
|
||||
Automated management of credits & licensing? (Or does this all fall under
|
||||
the heading of KitCAT?)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
above_root: True
|
||||
|
||||
# This file should NOT be found by testing!
|
After Width: | Height: | Size: 141 KiB |
After Width: | Height: | Size: 135 KiB |
After Width: | Height: | Size: 141 KiB |
After Width: | Height: | Size: 146 KiB |
After Width: | Height: | Size: 154 KiB |
After Width: | Height: | Size: 148 KiB |
After Width: | Height: | Size: 141 KiB |
After Width: | Height: | Size: 163 KiB |
After Width: | Height: | Size: 156 KiB |
After Width: | Height: | Size: 136 KiB |
After Width: | Height: | Size: 144 KiB |
After Width: | Height: | Size: 146 KiB |
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
above_root: True
|
||||
|
||||
# This file should NOT be found by testing!
|
|
@ -0,0 +1,30 @@
|
|||
# Episode definitions and metadata
|
||||
---
|
||||
project_unit:
|
||||
- rank: episode
|
||||
code: 001
|
||||
name: Pilot
|
||||
|
||||
sequences:
|
||||
- ZP
|
||||
- OP
|
||||
- LP
|
||||
# Override alphabetical order for subunits.
|
||||
|
||||
schema:
|
||||
delimiter: '-'
|
||||
# Go back to the default field separator for things under episode
|
||||
#
|
||||
# So it'll actually parse like this:
|
||||
#
|
||||
# A.001-LP-1-BeginningOfEnd-anim.txt
|
||||
#
|
||||
# (A, 001-LP-1-BeginningOfEnd-anim, txt)
|
||||
# (A, (001, LP, 1, BeginningOfEnd, anim), txt)
|
||||
#
|
||||
# Not sure I'm happy about the filetype being cutoff at the top level
|
||||
#
|
||||
|
||||
definitions:
|
||||
render_root: 'Episodes/A.001-Pilot/Renders' # Relative to project_root!
|
||||
|
9
testdata/myproject/Episodes/A.001-Pilot/Seq/LP-LastPoint/A.001-LP-1-BeginningOfEnd-anim.txt
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
The contents of this file are not used.
|
||||
The file_context module only reads the exterior metadata from the filesystem,
|
||||
including:
|
||||
|
||||
- file name
|
||||
- file stats (modification dates, permissions, etc)
|
||||
- directory path to the file
|
||||
- "sidecar" data files in YAML format for each level of directory
|
||||
(these are treated as attributes of the directory they are contained in).
|
5
testdata/myproject/Episodes/A.001-Pilot/Seq/LP-LastPoint/A.001-LP-1-BeginningOfEnd-anim.yaml
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
# Example 'sidecar' file
|
||||
---
|
||||
project_unit:
|
||||
- rank: block
|
||||
code: 1
|
|
@ -0,0 +1,9 @@
|
|||
---
|
||||
project_unit:
|
||||
- code: LP
|
||||
rank: sequence
|
||||
name: Last Point
|
||||
description: >
|
||||
Final scene in our exciting production 'My Project' in which the
|
||||
heroes finally get to the end of the metadata.
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
---
|
||||
testscalar: loweryaml
|
||||
|
||||
testdict:
|
||||
A:
|
||||
- 'item2'
|
||||
- 'item4'
|
||||
|
||||
B: 2
|
||||
|
||||
C:
|
||||
a: 1
|
||||
b: 3
|
||||
d: 3
|
||||
e: 4
|
|
@ -0,0 +1,7 @@
|
|||
---
|
||||
# This is a project root file in the wrong place, it makes everything below it a separate
|
||||
# project, which was probably not intended.
|
||||
|
||||
project_root:
|
||||
title: "Full Project Under Project"
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
---
|
||||
project_unit:
|
||||
- rank: series
|
||||
|
||||
code: A
|
||||
name: Series A
|
||||
|
||||
schema:
|
||||
delimiter: '.'
|
||||
# Kind of a hack to fix the S1E01 notation with a strict delimiter-splitting model
|
||||
# for fields?
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
# Anansi Blender Extensions (specific) data file (Overrides generic values as needed)
|
||||
---
|
||||
abx: True
|
||||
|
||||
testscalar: topyaml
|
||||
|
||||
testdict:
|
||||
A:
|
||||
- 'item1'
|
||||
- 'item2'
|
||||
- 'item3'
|
||||
|
||||
B: 1
|
||||
|
||||
C:
|
||||
a: 1
|
||||
b: 1
|
||||
c: 2
|
||||
d: 3
|
||||
|
|
@ -0,0 +1,159 @@
|
|||
# Project definitions and metadata
|
||||
---
|
||||
project_root:
|
||||
# Top level information about the project.
|
||||
owner: "My Production Company"
|
||||
copyright: "(C) My Production Company, LLC"
|
||||
contact: "The Boss <the_boss@my_production_company.com>"
|
||||
url: my_production_company.com
|
||||
|
||||
|
||||
project_unit:
|
||||
# Project unit for the whole project (i.e. top of the DAMS path)
|
||||
# This is expressed as a list, because with UnionList, it will accumulate
|
||||
# project_units down to the filepath, based on any intervening control
|
||||
# files (which should be there in this design, though they can be very
|
||||
# simple).
|
||||
|
||||
- rank: project
|
||||
# Just explicitly tell the DAMS path level
|
||||
# (avoid need to infer from directory path)
|
||||
|
||||
code: myproject
|
||||
# Short name used internally for reference (usually same as directory name)
|
||||
# Avoid spaces. Use identifier characters ([A-Za-z_0-9]). Can start with or
|
||||
# be a number, though.
|
||||
|
||||
name: My Project
|
||||
# Short name used to refer to project (for humans). Can have spacing and punctuation,
|
||||
# but should still be short.
|
||||
# Optional -- use id if not provided.
|
||||
|
||||
title: Project of Mine
|
||||
# Longer name used in headings (optional -- if not provided, name is used).
|
||||
|
||||
description: >
|
||||
This is a test project used only for testing the ABX and KitCAT file context
|
||||
analysis system.
|
||||
|
||||
# Descriptive paragraph for documentation and browsing interfaces.
|
||||
# Note the '>': this is how you put block text in YAML.
|
||||
|
||||
project_schema:
|
||||
- rank: project
|
||||
delimiter: '-'
|
||||
words: True
|
||||
type: string
|
||||
maxlength: 32
|
||||
|
||||
- rank: series
|
||||
delimiter: '.'
|
||||
type: letter
|
||||
|
||||
- rank: episode
|
||||
delimiter: '-'
|
||||
type: number
|
||||
maxlength: 3
|
||||
minlength: 3
|
||||
pad: 0
|
||||
format: "{:0>3d}"
|
||||
|
||||
- rank: sequence
|
||||
type:
|
||||
mt: Main Title
|
||||
et: Episode Title
|
||||
ec: Episode Credits
|
||||
oe: Out Eyecatch
|
||||
ie: In Eyecatch
|
||||
pr: Promotional
|
||||
ad: Sponsor Ad
|
||||
ZP: Zero Point
|
||||
OP: One Point
|
||||
LP: Last Point
|
||||
maxlength: 2
|
||||
minlength: 2
|
||||
pad: '_'
|
||||
|
||||
- rank: block
|
||||
type: number
|
||||
default: 0
|
||||
maxlength: 1
|
||||
minlength: 1
|
||||
pad: 0
|
||||
|
||||
- rank: camera
|
||||
type:
|
||||
- 'c1'
|
||||
- 'c2'
|
||||
- 'c3'
|
||||
- 'c4'
|
||||
default: None
|
||||
maxlength: 2
|
||||
minlength: 2
|
||||
|
||||
- rank: shot
|
||||
type: letter
|
||||
default: A
|
||||
maxlength: 1
|
||||
minlength: 1
|
||||
pad: 0
|
||||
|
||||
- rank: element
|
||||
type: string
|
||||
default: None
|
||||
maxlength: 5
|
||||
minlength: 0
|
||||
|
||||
|
||||
definitions:
|
||||
filetypes:
|
||||
blend: "Blender File"
|
||||
kdenlive: "Kdenlive Video Editor File"
|
||||
mlt: "Kdenlive Video Mix Script"
|
||||
svg: "Scalable Vector Graphics (Inkscape)"
|
||||
kra: "Krita Graphic File"
|
||||
xcf: "Gimp Graphic File"
|
||||
png: "Portable Network Graphics (PNG) Image"
|
||||
jpg: "Joint Photographic Experts Group (JPEG) Image"
|
||||
aup: "Audacity Project"
|
||||
ardour: "Ardour Project"
|
||||
flac: "Free Lossless Audio Codec (FLAC)"
|
||||
mp3: "MPEG Audio Layer III (MP3) Audio File"
|
||||
ogg: "Ogg Vorbis Audio File"
|
||||
avi: "Audio Video Interleave (AVI) Video Container"
|
||||
mkv: "Matroska Video Container"
|
||||
mp4: "Moving Picture Experts Group (MPEG) 4 Format"
|
||||
txt: "Plain Text File"
|
||||
|
||||
roles:
|
||||
extras: "Extras, crowds, auxiliary animated movement"
|
||||
mech: "Mechanical animation"
|
||||
anim: "Character animation"
|
||||
cam: "Camera direction"
|
||||
vfx: "Visual special effects"
|
||||
compos: "Compositing"
|
||||
bkg: "Background 2D image"
|
||||
bb: "Billboard 2D image"
|
||||
tex: "Texture 2D image"
|
||||
foley: "Foley sound"
|
||||
voice: "Voice recording"
|
||||
fx: "Sound effects"
|
||||
music: "Music track"
|
||||
cue: "Musical cue"
|
||||
amb: "Ambient sound"
|
||||
loop: "Ambient sound loop"
|
||||
edit: "Video edit"
|
||||
|
||||
roles_by_filetype:
|
||||
kdenlive: edit
|
||||
mlt: edit
|
||||
|
||||
omit_ranks: # Controls how much we shorten names
|
||||
edit: 0 # Use the entire designation for final output
|
||||
render: 1 # Use everything but project name for rendering
|
||||
filename: 1 # Same for filenames in general
|
||||
scene: 3 # Omit project, series, episode for (Blender) scene names
|
||||
|
||||
|
||||
|
||||
|
9
testdata/yaminimal/Episodes/Ae1-Void/Seq/VN-VagueName/Ae1-VN-1-VoidOfData-anim.txt
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
The contents of this file are not used.
|
||||
The file_context module only reads the exterior metadata from the filesystem,
|
||||
including:
|
||||
|
||||
- file name
|
||||
- file stats (modification dates, permissions, etc)
|
||||
- directory path to the file
|
||||
- "sidecar" data files in YAML format for each level of directory
|
||||
(these are treated as attributes of the directory they are contained in).
|
|
@ -0,0 +1,42 @@
|
|||
---
|
||||
render_profiles:
|
||||
- previz:
|
||||
engine: gl
|
||||
version: any
|
||||
fps: 30 # Basic FPS setting
|
||||
fps_div: 1000 # FPS divisor (for NTSC)
|
||||
fps_skip: 1 # Frames to skip ("on Ns")
|
||||
suffix: GL # Suffix used on render files
|
||||
format: AVI_JPEG # (AVI_JPEG, AVI_PNG, PNG, JPG, MP4...)
|
||||
freestyle: off
|
||||
|
||||
- paint: # Paint-only renders, full 30fps
|
||||
engine: bi
|
||||
fps: 30
|
||||
fps_skip: 10
|
||||
suffix: PT
|
||||
format: AVI_JPEG
|
||||
freestyle: off
|
||||
antialias: off
|
||||
motionblur: off
|
||||
|
||||
- check: # Check renders (1 fps, all effects)
|
||||
engine: bi
|
||||
fps: 30
|
||||
fps_skip: 30
|
||||
suffix: CHK
|
||||
format: JPG
|
||||
framedigits: 5 # No. of digits for frame suffix
|
||||
freestyle: on
|
||||
antialias: 8
|
||||
|
||||
- full:
|
||||
engine: bi
|
||||
fps: 30
|
||||
fps_skip: 1
|
||||
suffix: off
|
||||
format: PNG
|
||||
framedigits: 5
|
||||
freestyle: on
|
||||
antialias: 8
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
---
|
||||
# Project root must exist, but contents aren't really necessary unless
|
||||
# we want to have the data available (the value here doesn't even matter,
|
||||
# just that 'project_root' exists as a key in this data:
|
||||
project_root: True
|
||||
|
||||
# By default, the project_root's directory name is the code for the project.
|
||||
# Nothing is strictly required of the 'project_unit' item, but it can give
|
||||
# us an explicit, longer-form title than the directory name:
|
||||
project_unit:
|
||||
- title: Project with Minimal YAML Data
|
||||
|
||||
# Without SOME kind of schema, we can't figure much out, so there HAS
|
||||
# to be a project_schema:
|
||||
project_schema:
|
||||
- rank: project
|
||||
delimiter: '-'
|
||||
words: True
|
||||
type: string
|
||||
|
||||
- rank: series
|
||||
delimiter: 'e'
|
||||
type: letter
|
||||
|
||||
- rank: episode
|
||||
delimiter: '-'
|
||||
type: number
|
||||
maxlength: 1
|
||||
|
||||
- rank: sequence
|
||||
type:
|
||||
VN: Vague Name
|
||||
|
||||
- rank: block
|
||||
type: number
|
||||
maxlength: 1
|
||||
|
||||
- rank: shot
|
||||
type: letter
|
||||
maxlength: 1
|
||||
|
9
testdata/yamlless/Episodes/Ae1-Void/Seq/VN-VagueName/Ae1-VN-1-VoidOfData-anim.txt
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
The contents of this file are not used.
|
||||
The file_context module only reads the exterior metadata from the filesystem,
|
||||
including:
|
||||
|
||||
- file name
|
||||
- file stats (modification dates, permissions, etc)
|
||||
- directory path to the file
|
||||
- "sidecar" data files in YAML format for each level of directory
|
||||
(these are treated as attributes of the directory they are contained in).
|
|
@ -0,0 +1,253 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
The 'accumulate' module implements mutable data structures, like Dictionary
|
||||
and List, but which implement set operations that allow information from
|
||||
additional dictionaries and lists (whether using the new data types or not),
|
||||
to be combined with the existing information in a recursive way.
|
||||
|
||||
The problem it solves:
|
||||
|
||||
For various purposes, usually involving metadata, I'm going to want to
|
||||
collect information from various sources around a project: YAML files,
|
||||
INI files, JSON or YAML strings inside Blender documents, query responses
|
||||
from a central database, and so on. Each will generally have an incomplete
|
||||
picture of the information I want, and I want the data to be filled in.
|
||||
|
||||
Ordinary Python dictionaries and lists do not work well for this.
|
||||
|
||||
With the "update" method of standard Python dictionaries, if a key exists in
|
||||
the new dictionary, its value will always REPLACE the value in the old
|
||||
dictionary -- even if that value is itself a data structure, such as a
|
||||
dictionary. There is no recursion into sub-dictionaries. This makes it poor at
|
||||
combining nested data structures.
|
||||
|
||||
With the "extend" method of lists, if two source show the same information,
|
||||
there will now be two copies with redundant information, and extending it
|
||||
again will produce additional ones.
|
||||
|
||||
The accumulate module therefore provides RecursiveDict, with an update
|
||||
method that will recurse into sub-dictionaries and combine sequence elements
|
||||
using an idempotent "ordered-set-union" operation.
|
||||
|
||||
It also provides convenient conversions to YAML or JSON serializations of
|
||||
the data for output to text files or text blocks.
|
||||
"""
|
||||
|
||||
import unittest, os
|
||||
|
||||
# This is the most ridiculous work-around, but it seems to be necessary to
|
||||
# get Python 3 to import the modules for testing
|
||||
import sys
|
||||
print("__file__ = ", __file__)
|
||||
sys.path.append(os.path.normpath(os.path.join(__file__, '..', '..')))
|
||||
|
||||
from abx import accumulate
|
||||
|
||||
class AccumulationTests(unittest.TestCase):
    """
    Test that accumulate's combination operations give correct results.
    """

    def setUp(self):
        # No fixtures are needed; kept as an explicit extension point.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def test_union_list_union(self):
        # Start from a UnionList wrapping a simple list:
        base = accumulate.UnionList([1, 2, 3])

        # Union it with a plain list that has overlapping and new members:
        combined = base.union([3, 4, 5])

        # The result keeps the original elements and appends only the
        # elements that were not already present:
        self.assertEqual(combined, [1, 2, 3, 4, 5])

    def test_subdictionary_updates_instead_of_being_replaced(self):
        # Two plain dictionaries sharing the key 'A', each holding a
        # different part of the sub-dictionary's information:
        lower = {'A': {'a': 1}}
        upper = {'A': {'b': 2}}

        # Promote the first to a RecursiveDict, then update it with the
        # second (which may remain an ordinary dict):
        merged = accumulate.RecursiveDict(lower)
        merged.update(upper)

        # The sub-dictionary must now contain the new value...
        self.assertEqual(merged['A']['b'], 2)

        # ...without losing the value it already had:
        self.assertEqual(merged['A']['a'], 1)

    def test_sublist_updates_as_an_ordered_set_union(self):
        # Two dictionaries holding different sublists under the same key:
        lower = {'L': [1, 2, 3, 4]}
        upper = {'L': [5, 4, 3, 6]}

        merged = accumulate.RecursiveDict(lower)
        merged.update(upper)

        # The sublists combine as an ordered-set union -- elements from
        # the second list that repeat existing ones are dropped:
        self.assertEqual(merged['L'], [1, 2, 3, 4, 5, 6])

        # The operation is idempotent: applying the same update again
        # must not change anything:
        merged.update(upper)
        self.assertEqual(merged['L'], [1, 2, 3, 4, 5, 6])
||||
class CollectYaml(unittest.TestCase):
    """
    Test collection and combination of YAML metadata files from the
    testdata fixture tree.
    """
    # Absolute path to the fixture tree, one level above the tests:
    TESTDATA = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'testdata'))

    TESTPATH = os.path.join(TESTDATA, 'myproject/Episodes/' +
                'A.001-Pilot/Seq/LP-LastPoint/' +
                'A.001-LP-1-BeginningOfEnd-anim.txt')

    TESTPATH_EMBEDDED_PROJ = os.path.join(TESTDATA, 'myproject/')

    def _find_kitcat_root(self):
        """
        Locate the project root directory by collecting the
        kitcat/project YAML control files above TESTPATH.

        Extracted helper: this lookup was previously duplicated verbatim
        in two test methods.
        """
        return accumulate.get_project_root(
            accumulate.collect_yaml_files(
                os.path.abspath(self.TESTPATH),
                ('kitcat', 'project'), dirmatch=True, sidecar=True))

    def test_collect_yaml_files_w_abx_rules(self):
        # 'abx' rules look only for files literally named 'abx.yaml':
        files = accumulate.collect_yaml_files(self.TESTPATH, 'abx',
            root=os.path.join(self.TESTDATA, 'myproject'))

        self.assertEqual([os.path.abspath(f) for f in files],
            [os.path.join(self.TESTDATA, 'myproject/abx.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/abx.yaml')])

    def test_collect_yaml_files_w_kitcat_rules(self):
        # kitcat rules also match directory-named YAML and sidecar files:
        files = accumulate.collect_yaml_files(self.TESTPATH,
            ('kitcat', 'project'), dirmatch=True, sidecar=True,
            root=os.path.join(self.TESTDATA, 'myproject'))

        self.assertEqual([os.path.join(self.TESTDATA, f) for f in files],
            [os.path.join(self.TESTDATA, 'myproject/myproject.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/Episodes.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/A.001-Pilot.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/Seq/LP-LastPoint/' +
                          'LP-LastPoint.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/Seq/LP-LastPoint/' +
                          'A.001-LP-1-BeginningOfEnd-anim.yaml')
             ])

    def test_detecting_project_root(self):
        # Only the project-level YAML file declares a project root:
        self.assertFalse(accumulate.has_project_root(
            os.path.join(self.TESTDATA, 'kitcat.yaml')))
        self.assertTrue(accumulate.has_project_root(
            os.path.join(self.TESTDATA, 'myproject/myproject.yaml')))
        self.assertFalse(accumulate.has_project_root(
            os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/A.001-Pilot.yaml')))

    def test_trim_to_project_root(self):
        # Files above the project root should be discarded:
        trimmed = accumulate.trim_to_project_root(
            [os.path.join(self.TESTDATA, 'kitcat.yaml'),
             os.path.join(self.TESTDATA, 'myproject/myproject.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/A.001-Pilot.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/Seq/' +
                          'LP-LastPoint/A.001-LP-1-BeginningOfEnd-anim.yaml')]
            )

        self.assertEqual([os.path.abspath(f) for f in trimmed],
            [os.path.join(self.TESTDATA, 'myproject/myproject.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/A.001-Pilot.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/Seq/' +
                          'LP-LastPoint/A.001-LP-1-BeginningOfEnd-anim.yaml')])

    def test_trim_to_project_under_project(self):
        # If a nested root appears, trimming keeps only files below the
        # INNERMOST project root:
        trimmed = accumulate.trim_to_project_root(
            [os.path.join(self.TESTDATA, 'kitcat.yaml'),
             os.path.join(self.TESTDATA, 'myproject/myproject.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.002-Second/kitcat.yaml')])

        self.assertEqual([os.path.abspath(f) for f in trimmed],
            [os.path.join(self.TESTDATA, 'myproject/Episodes/A.002-Second/kitcat.yaml')])

    def test_finding_project_root_dir_from_kitcat_files(self):
        rootdir = self._find_kitcat_root()

        self.assertEqual(os.path.abspath(rootdir),
            os.path.join(self.TESTDATA, 'myproject'))

    def test_finding_abx_files_from_kitcat_root(self):
        # First locate the root via kitcat files, then collect abx files
        # relative to it:
        rootdir = self._find_kitcat_root()

        abx_files = accumulate.collect_yaml_files(
            os.path.abspath(self.TESTPATH),
            'abx', root=rootdir)

        self.assertEqual([os.path.abspath(f) for f in abx_files],
            [os.path.join(self.TESTDATA, 'myproject/abx.yaml'),
             os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/abx.yaml')])

    def test_combining_abx_yaml_files(self):
        abx_files = [
            os.path.join(self.TESTDATA, 'myproject/abx.yaml'),
            os.path.join(self.TESTDATA, 'myproject/Episodes/A.001-Pilot/abx.yaml')]

        testdata = accumulate.combine_yaml(abx_files)

        # Scalars are overridden by the deeper (later) file; lists are
        # combined as ordered-set unions:
        self.assertEqual(testdata['testscalar'], 'loweryaml')
        self.assertEqual(
            list(testdata['testdict']['A']),
            ['item1', 'item2', 'item3', 'item4'])

    def test_collecting_yaml_from_empty_dir(self):
        # An empty directory yields no files (and must not crash):
        files = accumulate.collect_yaml_files(
            os.path.join(self.TESTDATA, 'empty/'),
            'spam', root=self.TESTDATA)

        self.assertEqual(list(files), [])

    def test_collecting_yaml_from_nonexistent_file(self):
        # A nonexistent path also yields no files (and must not crash):
        files = accumulate.collect_yaml_files(
            os.path.join(self.TESTDATA, 'empty/no_such_file.txt'),
            'spam', root=self.TESTDATA)

        self.assertEqual(list(files), [])

    def test_combining_yamls_from_empty_list(self):
        # Combining nothing produces an empty mapping:
        data = accumulate.combine_yaml([])

        self.assertEqual(dict(data), {})

    def test_getting_project_data_from_path(self):
        # The one-call convenience API returns root plus both data sets:
        root, kitcat_data, abx_data = accumulate.get_project_data(self.TESTPATH)

        self.assertEqual(
            os.path.abspath(root),
            os.path.join(self.TESTDATA, 'myproject'))

        self.assertEqual(kitcat_data['project_unit'][0]['code'], 'myproject')

        self.assertEqual(abx_data['testdict']['A'],
                         ['item1', 'item2', 'item3', 'item4'])
||||
|
|
@ -0,0 +1,436 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
Test the file_context module.
|
||||
|
||||
This was written well after I wrote the module, and starts out as a conversion
|
||||
from the doctests I had in the module already.
|
||||
"""
|
||||
|
||||
|
||||
import unittest, os, textwrap
|
||||
import yaml
|
||||
|
||||
import sys
|
||||
print("__file__ = ", __file__)
|
||||
sys.path.append(os.path.normpath(os.path.join(__file__, '..', '..')))
|
||||
|
||||
from abx import file_context
|
||||
|
||||
class FileContext_NameSchema_Interface_Tests(unittest.TestCase):
    """
    Test the interfaces presented by NameSchema.

    NameSchema is not really intended for use outside the file_context
    module, but the module's behavior hinges on it, so its contract is
    pinned down here.
    """
    TESTDATA = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'testdata'))

    TESTPROJECTYAML = os.path.join(TESTDATA, 'myproject', 'myproject.yaml')

    TESTPATH = os.path.join(TESTDATA, 'myproject/Episodes/' +
                'A.001-Pilot/Seq/LP-LastPoint/' +
                'A.001-LP-1-BeginningOfEnd-anim.txt')

    # Stand-in for the 'project_schema' data normally read from YAML:
    TESTSCHEMA_LIST = [
        {'rank': 'project', 'delimiter': '-', 'format': '{:s}', 'words': True},
        {'rank': 'series', 'delimiter': 'E', 'format': '{:2s}'},
        {'rank': 'episode', 'delimiter': '-', 'format': '{!s:>02s}'},
        {'rank': 'sequence', 'delimiter': '-', 'format': '{:2s}'},
        {'rank': 'block', 'delimiter': '-', 'format': '{!s:1s}'},
        {'rank': 'shot', 'delimiter': '-', 'format': '{!s:s}'},
        {'rank': 'element', 'delimiter': '-', 'format': '{!s:s}'}]

    def test_NameSchema_create_single(self):
        ns = file_context.NameSchema(schema=self.TESTSCHEMA_LIST[0])

        # Check ALL the expected properties.

        # Values set explicitly by the test schema:
        self.assertEqual(ns.rank, 'project')
        self.assertEqual(ns.delimiter, '-')
        self.assertEqual(ns.format, '{:s}')
        self.assertEqual(ns.words, True)
        self.assertEqual(ns.codetype, str)

        # Defaulted values:
        self.assertEqual(ns.pad, '0')
        self.assertEqual(ns.minlength, 1)
        self.assertEqual(ns.maxlength, 0)
        self.assertEqual(ns.default, None)

        # Candidates for removal:
        self.assertEqual(ns.irank, 0)  # Is this used at all?
        self.assertEqual(ns.parent, None)
        self.assertListEqual(list(ns.ranks),
            ['series', 'episode', 'sequence',
             'block', 'camera', 'shot', 'element'])

    def test_NameSchema_load_chain_from_project_yaml(self):
        # Read the schema list straight from the real project YAML:
        with open(self.TESTPROJECTYAML, 'rt') as yaml_file:
            schema_dicts = yaml.safe_load(yaml_file)['project_schema']

        # Build the chain, linking each schema to its predecessor:
        chain = []
        prev = None
        for entry in schema_dicts:
            node = file_context.NameSchema(
                parent=prev,
                rank=entry['rank'],
                schema=entry)
            chain.append(node)
            prev = node

        self.assertEqual(len(chain), 8)

        # Walking seven parents up from the last link lands on 'project':
        self.assertEqual(
            chain[-1].parent.parent.parent.parent.parent.parent.parent.rank,
            'project')

        self.assertEqual(chain[5].rank, 'camera')
        self.assertEqual(chain[5].codetype[1], ('c2', 'c2', 'c2'))
||||
class FileContext_Parser_UnitTests(unittest.TestCase):
    """
    Test the pluggable filename parsers against representative names.
    """
    TESTFILENAMES = ('S1E01-SF-4-SoyuzDMInt-cam.blend', 'S1E02-MM-MediaMontage-compos.blend',
                     'S1E01-PC-PressConference-edit.kdenlive',
                     'S1E01-LA-Launch.kdenlive')

    # Namepaths as they would be collected from YAML 'project_unit' data:
    TESTNAMEPATHS = (('Lunatics', 'S1', '1', 'SF', '4'),
                     ('Lunatics', 'S1', '2', 'MM'),
                     ('Lunatics', 'S1', '1', 'PC'),
                     ('Lunatics', 'S1', '1', 'LA'))

    # Stand-in for the 'project_schema' data normally read from YAML:
    TESTSCHEMA_LIST = [
        {'rank': 'project', 'delimiter': '-', 'format': '{:s}', 'words': True},
        {'rank': 'series', 'delimiter': 'E', 'format': '{:2s}'},
        {'rank': 'episode', 'delimiter': '-', 'format': '{!s:>02s}'},
        {'rank': 'sequence', 'delimiter': '-', 'format': '{:2s}'},
        {'rank': 'block', 'delimiter': '-', 'format': '{!s:1s}'},
        {'rank': 'shot', 'delimiter': '-', 'format': '{!s:s}'},
        {'rank': 'element', 'delimiter': '-', 'format': '{!s:s}'}]

    # Stand-in for the 'definitions' data normally read from YAML:
    TESTDEFS = {
        'filetypes': {
            'blend': "Blender File",
            'kdenlive': "Kdenlive Video Editor File",
            'mlt': "Kdenlive Video Mix Script",
            'svg': "Scalable Vector Graphics (Inkscape)",
            'kra': "Krita Graphic File",
            'xcf': "Gimp Graphic File",
            'png': "Portable Network Graphics (PNG) Image",
            'jpg': "Joint Photographic Experts Group (JPEG) Image",
            'aup': "Audacity Project",
            'ardour': "Ardour Project",
            'flac': "Free Lossless Audio Codec (FLAC)",
            'mp3': "MPEG Audio Layer III (MP3) Audio File",
            'ogg': "Ogg Vorbis Audio File",
            'avi': "Audio Video Interleave (AVI) Video Container",
            'mkv': "Matroska Video Container",
            'mp4': "Moving Picture Experts Group (MPEG) 4 Format}",
            },
        'roles': {
            'extras': "Extras, crowds, auxillary animated movement",
            'mech': "Mechanical animation",
            'anim': "Character animation",
            'cam': "Camera direction",
            'vfx': "Visual special effects",
            'compos': "Compositing",
            'bkg': "Background 2D image",
            'bb': "Billboard 2D image",
            'tex': "Texture 2D image",
            'foley': "Foley sound",
            'voice': "Voice recording",
            'fx': "Sound effects",
            'music': "Music track",
            'cue': "Musical cue",
            'amb': "Ambient sound",
            'loop': "Ambient sound loop",
            'edit': "Video edit"
            },
        'roles_by_filetype': {
            'kdenlive': 'edit',
            'mlt': 'edit'
            }
        }

    TESTDATA = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'testdata'))

    TESTPATH = os.path.join(TESTDATA, 'myproject/Episodes/' +
                'A.001-Pilot/Seq/LP-LastPoint/' +
                'A.001-LP-1-BeginningOfEnd-anim.txt')

    def setUp(self):
        # Instantiate NameSchema objects from the raw schema dicts:
        self.TESTSCHEMAS = [file_context.NameSchema(schema=s)
                            for s in self.TESTSCHEMA_LIST]

    def test_parsing_filenames_w_episode_parser(self):
        # The 'abx_episode' parser needs no schema -- it hard-codes the
        # legacy "Lunatics!" episode naming convention.
        abx_episode_parser = file_context.NameParsers['abx_episode']()

        parsed = abx_episode_parser('S1E01-SF-4-SoyuzDMInt-cam.blend', [])
        self.assertDictEqual(parsed[1],
            {'filetype': 'blend',
             'role': 'cam',
             'hierarchy': 'episode',
             'series': {'code': 'S1'},
             'episode': {'code': 1},
             'rank': 'block',
             'seq': {'code': 'SF'},
             'block': {'code': 4, 'title': 'Soyuz DMI nt'}})

        parsed = abx_episode_parser('S1E02-MM-MediaMontage-compos.blend', [])
        self.assertDictEqual(parsed[1],
            {'filetype': 'blend',
             'role': 'compos',
             'hierarchy': 'episode',
             'series': {'code': 'S1'},
             'episode': {'code': 2},
             'rank': 'seq',
             'seq': {'code': 'MM', 'title': 'Media Montage'}})

        parsed = abx_episode_parser('S1E01-PC-PressConference-edit.kdenlive', [])
        self.assertDictEqual(parsed[1],
            {'filetype': 'kdenlive',
             'role': 'edit',
             'hierarchy': 'episode',
             'series': {'code': 'S1'},
             'episode': {'code': 1},
             'rank': 'seq',
             'seq': {'code': 'PC', 'title': 'Press Conference'}})

        parsed = abx_episode_parser('S1E01-LA-Launch.kdenlive', [])
        self.assertDictEqual(parsed[1],
            {'filetype': 'kdenlive',
             'role': 'edit',
             'hierarchy': 'episode',
             'series': {'code': 'S1'},
             'episode': {'code': 1},
             'rank': 'seq',
             'seq': {'code': 'LA', 'title': 'Launch'}})

    def test_parsing_filenames_w_schema_parser(self):
        # The 'abx_schema' parser is driven by schema + definitions data:
        abx_schema_parser = file_context.NameParsers['abx_schema'](
            schemas=self.TESTSCHEMAS, definitions=self.TESTDEFS)

        parsed = abx_schema_parser('S1E01-SF-4-SoyuzDMInt-cam.blend',
                                   namepath=self.TESTNAMEPATHS[0])
        self.assertDictEqual(parsed[1],
            {'filetype': 'blend',
             'comment': None,
             'role': 'cam',
             'title': 'SoyuzDMInt',
             'series': {'code': 'S1'},
             'episode': {'code': '01'},
             'sequence': {'code': 'SF'},
             'block': {'code': '4', 'title': 'SoyuzDMInt'},
             'rank': 'block'}
            )

        parsed = abx_schema_parser('S1E02-MM-MediaMontage-compos.blend',
                                   namepath=self.TESTNAMEPATHS[1])
        self.assertDictEqual(parsed[1],
            {'filetype': 'blend',
             'comment': None,
             'role': 'compos',
             'title': 'MediaMontage',
             'series': {'code': 'S1'},
             'episode': {'code': '02'},
             'sequence': {'code': 'MM', 'title': 'MediaMontage'},
             'rank': 'sequence'}
            )

        parsed = abx_schema_parser('S1E01-PC-PressConference-edit.kdenlive',
                                   namepath=self.TESTNAMEPATHS[2])
        self.assertDictEqual(parsed[1],
            {'filetype': 'kdenlive',
             'comment': None,
             'role': 'edit',
             'title': 'PressConference',
             'series': {'code': 'S1'},
             'episode': {'code': '01'},
             'sequence': {'code': 'PC', 'title': 'PressConference'},
             'rank': 'sequence'}
            )

        parsed = abx_schema_parser('S1E01-LA-Launch.kdenlive',
                                   namepath=self.TESTNAMEPATHS[3])
        self.assertDictEqual(parsed[1],
            {'filetype': 'kdenlive',
             'comment': None,
             'role': 'edit',
             'title': 'Launch',
             'series': {'code': 'S1'},
             'episode': {'code': '01'},
             'sequence': {'code': 'LA', 'title': 'Launch'},
             'rank': 'sequence'}
            )
||||
class FileContext_Implementation_UnitTests(unittest.TestCase):
    """
    Test internal behavior of FileContext against the fixture project.
    """
    TESTDATA = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'testdata'))

    TESTPATH = os.path.join(TESTDATA, 'myproject/Episodes/' +
                'A.001-Pilot/Seq/LP-LastPoint/' +
                'A.001-LP-1-BeginningOfEnd-anim.txt')

    def test_filecontext_finds_and_loads_file(self):
        fc = file_context.FileContext(self.TESTPATH)
        # Basic location data: filename, project root, folder chain.
        self.assertEqual(fc.filename, 'A.001-LP-1-BeginningOfEnd-anim.txt')
        self.assertEqual(fc.root, os.path.join(self.TESTDATA, 'myproject'))
        self.assertListEqual(fc.folders,
            ['myproject', 'Episodes', 'A.001-Pilot', 'Seq', 'LP-LastPoint'])

    def test_filecontext_gets_correct_yaml_for_file(self):
        fc = file_context.FileContext(self.TESTPATH)
        # Data must arrive from all three expected YAML layers.
        # From the project YAML file:
        self.assertEqual(fc.provided_data['definitions']['omit_ranks']['scene'], 3)
        # From the sequence directory YAML file:
        self.assertEqual(fc.provided_data['project_unit'][-2]['name'], 'Last Point')
        # From the sidecar YAML file:
        self.assertEqual(fc.provided_data['project_unit'][-1]['code'], 1)

    def test_filecontext_gets_correct_filename_info(self):
        fc = file_context.FileContext(self.TESTPATH)
        # Fields parsed out of the filename itself:
        self.assertEqual(fc.filetype, 'txt')
        self.assertEqual(fc.role, 'anim')
        self.assertEqual(fc.title, 'BeginningOfEnd')
        self.assertEqual(fc.comment, None)
||||
class FileContext_API_UnitTests(unittest.TestCase):
    """
    Test the public FileContext API, one property or method per test.
    """
    TESTDATA = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'testdata'))

    TESTPATH = os.path.join(TESTDATA, 'myproject/Episodes/' +
                'A.001-Pilot/Seq/LP-LastPoint/' +
                'A.001-LP-1-BeginningOfEnd-anim.txt')

    def setUp(self):
        # One shared context per test; construction is repeated so tests
        # stay independent.
        self.fc = file_context.FileContext(self.TESTPATH)

    def test_filecontext_API_namepath(self):
        self.assertListEqual(self.fc.namepath, ['myproject', 'A', 1, 'LP', 1])

    def test_filecontext_API_rank(self):
        self.assertEqual(self.fc.rank, 'block')

    def test_filecontext_API_code(self):
        self.assertEqual(self.fc.code, 1)

    def test_filecontext_API_name(self):
        self.assertEqual(self.fc.name, 'BeginningOfEnd')

    def test_filecontext_API_designation(self):
        self.assertEqual(self.fc.designation,
                         'myproject-A.001-LP-1')

    def test_filecontext_API_fullname(self):
        self.assertEqual(self.fc.fullname, 'myproject-A.001-LP-1-BeginningOfEnd')

    def test_filecontext_API_shortname(self):
        self.assertEqual(self.fc.shortname, 'A.001-LP-1-BeginningOfEnd')

    def test_filecontext_API_scene_name(self):
        self.assertEqual(self.fc.get_scene_name('Test'), 'LP-1 Test')

    def test_filecontext_API_render_root(self):
        self.assertEqual(os.path.abspath(self.fc.render_root),
            os.path.abspath(os.path.join(self.TESTDATA,
                'myproject/Episodes/A.001-Pilot/Renders')))

    def test_filecontext_API_get_render_path(self):
        self.assertEqual(os.path.abspath(self.fc.get_render_path(suffix='T')),
            os.path.abspath(os.path.join(self.TESTDATA,
                'myproject', 'Episodes', 'A.001-Pilot', 'Renders',
                'T', 'A.001-LP-1', 'A.001-LP-1-T-f#####.png')))

    def test_filecontext_API_new_name_context_explicit(self):
        # Explicit: supply the shot code directly.
        nc = self.fc.new_name_context(shot='A')
        self.assertEqual(nc.get_scene_name('Exp'), 'LP-1-A Exp')

    def test_filecontext_API_new_name_context_implicit(self):
        # Implicit: supply only the rank, letting the code default.
        nc = self.fc.new_name_context(rank='shot')
        self.assertEqual(nc.get_scene_name('Imp'), 'LP-1-A Imp')
||||
class NameContext_API_Tests(unittest.TestCase):
    """
    Test the NameContext API, using a context derived from a FileContext.
    """
    TESTDATA = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'testdata'))

    TESTPATH = os.path.join(TESTDATA, 'myproject/Episodes/' +
                'A.001-Pilot/Seq/LP-LastPoint/' +
                'A.001-LP-1-BeginningOfEnd-anim.txt')

    def setUp(self):
        # Derive a shot-level name context from the block-level file:
        fc = file_context.FileContext(self.TESTPATH)
        self.nc = fc.new_name_context(rank='shot', shot='A')

    def test_namecontext_reports_correct_rank(self):
        self.assertEqual(self.nc.rank, 'shot')

    def test_namecontext_reports_correct_code(self):
        self.assertEqual(self.nc.code, 'A')

    def test_namecontext_reports_correct_namepath(self):
        # The camera slot (between block and shot) is unset, hence None:
        self.assertEqual(self.nc.namepath, ['myproject', 'A', 1, 'LP', 1, None, 'A'])
||||
class FileContext_FailOver_Tests(unittest.TestCase):
    """
    Tests of how well FileContext copes with imperfect data.

    It's very likely that ABX will encounter projects that aren't
    set up perfectly (or at all), and we don't want it to crash
    in that situation, but rather fail gracefully or even work
    around the problem.
    """
    TESTDATA = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '..', 'testdata'))

    TEST_EMPTY_PROJECT = os.path.join(TESTDATA, 'empty')

    TEST_NONEXISTENT_PATH = os.path.join(TESTDATA,
        'empty', 'Non', 'Existing', 'F', 'F-1-NonExisting-anim.blend')

    TEST_NO_YAML = os.path.join(TESTDATA,
        'yamlless', 'Episodes', 'Ae1-Void', 'Seq', 'VN-VagueName',
        'Ae1-VN-1-VoidOfData-anim.txt')

    TEST_MINIMAL_YAML = os.path.join(TESTDATA,
        'yaminimal', 'Episodes', 'Ae1-Void', 'Seq', 'VN-VagueName',
        'Ae1-VN-1-VoidOfData-anim.txt')

    # NOTE: these tests previously asserted nothing at all -- a constructor
    # returning a wrong object would pass silently, and the unused 'fc'
    # locals drew lint warnings. Each now carries a minimal smoke
    # assertion; "does not raise" is still the main point of each test.

    def test_filecontext_no_project_path(self):
        # Constructing with no path at all must not crash:
        fc = file_context.FileContext()
        self.assertIsInstance(fc, file_context.FileContext)

    def test_filecontext_failover_empty_project(self):
        # A project directory with no files must not crash:
        fc = file_context.FileContext(self.TEST_EMPTY_PROJECT)
        self.assertIsInstance(fc, file_context.FileContext)

    def test_filecontext_failover_nonexisting_file(self):
        # A path to a file that doesn't exist must not crash:
        fc = file_context.FileContext(self.TEST_NONEXISTENT_PATH)
        self.assertIsInstance(fc, file_context.FileContext)

    def test_filecontext_failover_no_yaml(self):
        # A project tree with no YAML control files must not crash:
        fc = file_context.FileContext(self.TEST_NO_YAML)
        self.assertIsInstance(fc, file_context.FileContext)

    def test_filecontext_failover_minimal_yaml(self):
        # A project tree with only minimal YAML data must not crash:
        fc = file_context.FileContext(self.TEST_MINIMAL_YAML)
        self.assertIsInstance(fc, file_context.FileContext)