#!./venv/bin/python
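"""Build a DAG of ffmpeg processing nodes and compile it into an ffmpeg command line.

Nodes represent file inputs, filters (trim, concat), outputs and global flags.
Each node is identified by an MD5 hash of its own properties plus its parents'
hashes, and an output node turns the graph into ffmpeg arguments via a
topological sort (see _OutputNode.get_args).
"""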

from functools import partial, reduce  # reduce is no longer a builtin on Python 3
import hashlib
import json
import operator
import subprocess


def _create_root_node(node_class, *args, **kwargs):
    root = node_class(*args, **kwargs)
    root._update_hash()
    return root


def _create_child_node(node_class, parent, *args, **kwargs):
    child = node_class([parent], *args, **kwargs)
    child._update_hash()
    return child

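
# Base graph node.  Subclasses define NAME (the operator name) and optionally
# STATIC = True, meaning the operator builds the node directly from its
# arguments (via _create_root_node) rather than chaining off a single parent
# node.  Operators are attached dynamically by _add_operators below.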
class _Node(object):
    def __init__(self, parents):
        parent_hashes = [parent.hash for parent in parents]
        assert len(parent_hashes) == len(set(parent_hashes)), 'Same node cannot be included as parent multiple times'
        self.parents = parents

    @classmethod
    def _add_operator(cls, node_class):
        if getattr(node_class, 'STATIC', False):
            @classmethod
            def func(cls2, *args, **kwargs):
                return _create_root_node(node_class, *args, **kwargs)
        else:
            def func(self, *args, **kwargs):
                return _create_child_node(node_class, self, *args, **kwargs)
        setattr(cls, node_class.NAME, func)

    @classmethod
    def _add_operators(cls, node_classes):
        for node_class in node_classes:
            cls._add_operator(node_class)

    @property
    def _props(self):
        return {k: v for k, v in self.__dict__.items() if k not in ['parents', 'hash']}

    def __repr__(self):
        # TODO: exclude default values.
        props = self._props
        formatted_props = ['{}={!r}'.format(key, props[key]) for key in sorted(props)]
        return '{}({})'.format(self.NAME, ','.join(formatted_props))

    def __eq__(self, other):
        return self.hash == other.hash

    def __hash__(self):
        # Keep __hash__ consistent with __eq__; nodes are used as dict keys below.
        return hash(self.hash)

    def _update_hash(self):
        # Sort keys and encode before hashing so the hash is deterministic and also works on Python 3.
        my_hash = hashlib.md5(json.dumps(self._props, sort_keys=True).encode('utf-8')).hexdigest()
        parent_hashes = [parent.hash for parent in self.parents]
        hashes = parent_hashes + [my_hash]
        self.hash = hashlib.md5(','.join(hashes).encode('utf-8')).hexdigest()
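

# Concrete node types: _InputNode/_FileInputNode for inputs, _FilterNode/
# _TrimFilterNode and _ConcatNode for filter_complex filters, and the
# _OutputNode family below for outputs and global flags.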
class _InputNode(_Node):
    pass


class _FileInputNode(_InputNode):
    NAME = 'file_input'
    STATIC = True

    def __init__(self, filename):
        super(_FileInputNode, self).__init__(parents=[])
        self.filename = filename


class _FilterNode(_Node):
    pass


class _TrimFilterNode(_FilterNode):
    NAME = 'trim'

    def __init__(self, parents, start_frame, end_frame, setpts='PTS-STARTPTS'):
        super(_TrimFilterNode, self).__init__(parents)
        self.start_frame = start_frame
        self.end_frame = end_frame
        self.setpts = setpts


class _ConcatNode(_Node):
    NAME = 'concat'
    STATIC = True

    def __init__(self, *parents):
        super(_ConcatNode, self).__init__(parents)
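

# _OutputNode does the heavy lifting: it topologically sorts the graph, renders
# each filter node into a filter_complex spec, and assembles the final ffmpeg
# argument list in get_args().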
class _OutputNode(_Node):
    @classmethod
    def _get_stream_name(cls, name):
        return '[{}]'.format(name)

    @classmethod
    def _get_input_args(cls, input_node):
        if isinstance(input_node, _FileInputNode):
            args = ['-i', input_node.filename]
        else:
            assert False, 'Unsupported input node: {}'.format(input_node)
        return args
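
    # Depth-first topological sort starting from an output node.  Returns nodes in
    # dependency order plus a map of node -> children; marked_nodes is used to
    # detect cycles.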
    @classmethod
    def _topo_sort(cls, start_node):
        marked_nodes = []
        sorted_nodes = []
        child_map = {}

        def visit(node, child):
            assert node not in marked_nodes, 'Graph is not a DAG'
            if child is not None:
                if node not in child_map:
                    child_map[node] = []
                child_map[node].append(child)
            if node not in sorted_nodes:
                marked_nodes.append(node)
                for parent in node.parents:
                    visit(parent, node)
                marked_nodes.remove(node)
                sorted_nodes.append(node)

        unmarked_nodes = [start_node]
        while unmarked_nodes:
            visit(unmarked_nodes.pop(), None)
        return sorted_nodes, child_map

    @classmethod
    def _get_filter(cls, node):
        # TODO: find a better way to do this instead of ugly if/elifs.
        if isinstance(node, _TrimFilterNode):
            return 'trim=start_frame={}:end_frame={},setpts={}'.format(node.start_frame, node.end_frame, node.setpts)
        elif isinstance(node, _ConcatNode):
            return 'concat=n={}'.format(len(node.parents))
        else:
            assert False, 'Unsupported filter node: {}'.format(node)

    @classmethod
    def _get_filter_spec(cls, i, node, stream_name_map):
        stream_name = cls._get_stream_name('v{}'.format(i))
        stream_name_map[node] = stream_name
        inputs = [stream_name_map[parent] for parent in node.parents]
        filter_spec = '{}{}{}'.format(''.join(inputs), cls._get_filter(node), stream_name)
        return filter_spec

    @classmethod
    def _get_filter_arg(cls, filter_nodes, stream_name_map):
        filter_specs = [cls._get_filter_spec(i, node, stream_name_map) for i, node in enumerate(filter_nodes)]
        return ';'.join(filter_specs)

    @classmethod
    def _get_global_args(cls, node):
        if isinstance(node, _OverwriteOutputNode):
            return ['-y']
        else:
            assert False, 'Unsupported global node: {}'.format(node)

    @classmethod
    def _get_output_args(cls, node, stream_name_map):
        args = []
        if not isinstance(node, _MergeOutputsNode):
            stream_name = stream_name_map[node.parents[0]]
            if stream_name != '[0]':
                # Skip -map when the output comes straight from the first input.
                args += ['-map', stream_name]
            if isinstance(node, _FileOutputNode):
                args += [node.filename]
            else:
                assert False, 'Unsupported output node: {}'.format(node)
        return args
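
    # Build the full argument list: input args first, then -filter_complex (when
    # there are filter nodes), then per-output args, then global flags such as -y.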
    def get_args(self):
        args = []
        # TODO: group nodes together, e.g. `-i somefile -r somerate`.
        sorted_nodes, child_map = self._topo_sort(self)
        input_nodes = [node for node in sorted_nodes if isinstance(node, _InputNode)]
        output_nodes = [node for node in sorted_nodes
                        if isinstance(node, _OutputNode) and not isinstance(node, _GlobalNode)]
        global_nodes = [node for node in sorted_nodes if isinstance(node, _GlobalNode)]
        filter_nodes = [node for node in sorted_nodes if node not in (input_nodes + output_nodes + global_nodes)]
        stream_name_map = {node: self._get_stream_name(i) for i, node in enumerate(input_nodes)}
        filter_arg = self._get_filter_arg(filter_nodes, stream_name_map)
        args += reduce(operator.add, [self._get_input_args(node) for node in input_nodes])
        if filter_arg:
            args += ['-filter_complex', filter_arg]
        args += reduce(operator.add, [self._get_output_args(node, stream_name_map) for node in output_nodes])
        args += reduce(operator.add, [self._get_global_args(node) for node in global_nodes], [])
        return args

    def run(self):
        args = ['ffmpeg'] + self.get_args()
        subprocess.check_call(args)


class _GlobalNode(_OutputNode):
    def __init__(self, parents):
        assert len(parents) == 1
        assert isinstance(parents[0], _OutputNode), 'Global nodes can only be attached after output nodes'
        super(_GlobalNode, self).__init__(parents)


class _OverwriteOutputNode(_GlobalNode):
    NAME = 'overwrite_output'


class _MergeOutputsNode(_OutputNode):
    NAME = 'merge_outputs'

    def __init__(self, *parents):
        assert all(isinstance(parent, _OutputNode) for parent in parents), 'Can only merge output streams'
        super(_MergeOutputsNode, self).__init__(parents)


class _FileOutputNode(_OutputNode):
    NAME = 'file_output'

    def __init__(self, parents, filename):
        super(_FileOutputNode, self).__init__(parents)
        self.filename = filename


NODE_CLASSES = [
    _ConcatNode,
    _FileInputNode,
    _FileOutputNode,
    _OverwriteOutputNode,
    _TrimFilterNode,
]

_Node._add_operators(NODE_CLASSES)
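

# Also expose each operator as a module-level function (concat, file_input,
# file_output, overwrite_output, trim); non-static operators take the parent
# node as their first argument.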
for node_class in NODE_CLASSES:
    if getattr(node_class, 'STATIC', False):
        func = _create_root_node
    else:
        func = _create_child_node
    globals()[node_class.NAME] = partial(func, node_class)


def get_args(node):
    assert isinstance(node, _OutputNode), 'Cannot generate ffmpeg args for non-output node'
    return node.get_args()


def run(node):
    assert isinstance(node, _OutputNode), 'Cannot run ffmpeg on non-output node'
    return node.run()
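

if __name__ == '__main__':
    # Minimal usage sketch: 'in.mp4' and 'out.mp4' are placeholder filenames, and
    # actually running the pipeline would require ffmpeg on PATH plus a real input
    # file, so this only prints the generated command.
    in_file = file_input('in.mp4')
    joined = concat(
        in_file.trim(start_frame=10, end_frame=20),
        in_file.trim(start_frame=30, end_frame=40),
    )
    out = joined.file_output('out.mp4').overwrite_output()
    print('ffmpeg ' + ' '.join(get_args(out)))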