diff --git a/core/socket_data.py b/core/socket_data.py new file mode 100644 index 0000000000000000000000000000000000000000..1d161ba0fb80c542e6a7b2905c14e361a36d6a5d --- /dev/null +++ b/core/socket_data.py @@ -0,0 +1,114 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# +# ##### END GPL LICENSE BLOCK ##### + +from sverchok import data_structure + +##################################### +# socket data cache # +##################################### + +sentinel = object() + +# socket cache +socket_data_cache = {} + +# faster than builtin deep copy for us. +# useful for our limited case +# we should be able to specify vectors here to get them create +# or stop destroying them when in vector socket. 
+
+
+def sv_deep_copy(lst):
+    """return deep copied data of list/tuple structure"""
+    if isinstance(lst, (list, tuple)):
+        if lst and not isinstance(lst[0], (list, tuple)):
+            return lst[:]
+        return [sv_deep_copy(l) for l in lst]
+    return lst
+
+
+# Build string for showing in socket label
+def SvGetSocketInfo(socket):
+    """returns string to show in socket label"""
+    global socket_data_cache
+    ng = socket.id_data.name
+
+    if socket.is_output:
+        s_id = socket.socket_id
+    elif socket.is_linked:
+        s_id = socket.other.socket_id
+    else:
+        return ''
+    if ng in socket_data_cache:
+        if s_id in socket_data_cache[ng]:
+            data = socket_data_cache[ng][s_id]
+            if data:
+                return str(len(data))
+    return ''
+
+
+def SvSetSocket(socket, out):
+    """sets socket data for socket"""
+    global socket_data_cache
+    if data_structure.DEBUG_MODE:
+        if not socket.is_output:
+            print("Warning, {} setting input socket: {}".format(socket.node.name, socket.name))
+        if not socket.is_linked:
+            print("Warning: {} setting unconncted socket: {}".format(socket.node.name, socket.name))
+    s_id = socket.socket_id
+    s_ng = socket.id_data.name
+    if s_ng not in socket_data_cache:
+        socket_data_cache[s_ng] = {}
+    socket_data_cache[s_ng][s_id] = out
+
+
+def SvGetSocket(socket, deepcopy=True):
+    """gets socket data from socket,
+    if deepcopy is True a deep copy of the data is made,
+    to increase performance if the node doesn't mutate input
+    set to False and increase performance substantially
+    """
+    global socket_data_cache
+    if socket.is_linked:
+        other = socket.other
+        s_id = other.socket_id
+        s_ng = other.id_data.name
+        if s_ng not in socket_data_cache:
+            raise LookupError
+        if s_id in socket_data_cache[s_ng]:
+            out = socket_data_cache[s_ng][s_id]
+            if deepcopy:
+                return sv_deep_copy(out)
+            else:
+                return out
+        else:
+            if data_structure.DEBUG_MODE:
+                print("cache miss:", socket.node.name, "->", socket.name, "from:", other.node.name, "->", other.name)
+            raise SvNoDataError
+    # not linked
+    raise SvNoDataError
+
+class SvNoDataError(LookupError): + pass + +def reset_socket_cache(ng): + """ + Reset socket cache either for node group. + """ + global socket_data_cache + socket_data_cache[ng.name] = {} diff --git a/core/update_system.py b/core/update_system.py index c886fe4fb55bcf82eb3b23a43aecb303936e9769..c74e625112d80b8ba70d350061879823c649bacb 100644 --- a/core/update_system.py +++ b/core/update_system.py @@ -23,7 +23,7 @@ import bpy from mathutils import Vector from sverchok import data_structure -from sverchok.data_structure import SvNoDataError +from sverchok.core.socket_data import SvNoDataError, reset_socket_cache import sverchok import traceback @@ -367,7 +367,7 @@ def build_update_list(ng=None): out = [make_update_list(ng, s, deps) for s in node_sets] update_cache[ng.name] = out partial_update_cache[ng.name] = {} - data_structure.reset_socket_cache(ng) + reset_socket_cache(ng) def process_to_node(node): diff --git a/data_structure.py b/data_structure.py index 31db29b7690dae879d178cf93467a7742286678d..d2c937e95fd0515d4bb479499803dc821ac98989 100755 --- a/data_structure.py +++ b/data_structure.py @@ -16,7 +16,6 @@ # # ##### END GPL LICENSE BLOCK ##### -from functools import reduce from math import radians import itertools import time @@ -33,129 +32,18 @@ RELOAD_EVENT = False # this is set correctly later. SVERCHOK_NAME = "sverchok" -#handle for object in node -temp_handle = {} -# cache node group update trees it not used, as i see -# cache_nodes = {} -# socket cache -socket_data_cache = {} -# for viewer baker node cache cache_viewer_baker = {} -sv_Vars = {} -# note used? - -#bmesh_mapping = {} -#per_cache = {} sentinel = object() -##################################################### -################### update magic #################### -##################################################### -# is this used? 
-# i think no - - -# main update -def read_cnodes(cnode): - global cache_nodes - if cnode not in cache_nodes: - return None - return cache_nodes[cnode] - - -def write_cnodes(cnode, number): - global cache_nodes - if cnode in cache_nodes: - del cache_nodes[cnode] - cache_nodes[cnode] = number - - -def clear_cnodes(cnode='ALL'): - global cache_nodes - if cnode == 'ALL': - for i in cache_nodes.items: - del cache_nodes[i] - else: - if read_cnodes(cnode) is not None: - del cache_nodes[cnode] - - -def initialize_cnodes(): - node_name = 'GLOBAL CNODE' - write_cnodes(node_name, 1) - write_cnodes('LOCK UPDATE CNODES', 1) - - -def check_update_node(node_name, write=False): - numb = read_cnodes(node_name) - etalon = read_cnodes('GLOBAL CNODE') - #print('etalon',etalon) - if numb == etalon: - return False - else: - if write: - write_cnodes(node_name, etalon) - return True - - -def ini_update_cnode(node_name): - if read_cnodes('LOCK UPDATE CNODES') == 1: - return False - - etalon = read_cnodes('GLOBAL CNODE') - if etalon is None: - initialize_cnodes() - etalon = 1 - else: - etalon += 1 - - write_cnodes('GLOBAL CNODE', etalon) - write_cnodes(node_name, etalon) - return True - - -def is_updated_cnode(): - write_cnodes('LOCK UPDATE CNODES', 0) - - -def lock_updated_cnode(): - write_cnodes('LOCK UPDATE CNODES', 1) - - -##################################################### -################### bmesh magic ##################### -##################################################### - - -def read_bmm(bm_ref): - global bmesh_mapping - if bm_ref not in bmesh_mapping: - return None - return bmesh_mapping[bm_ref] - - -def write_bmm(bm_ref, bm): - global bmesh_mapping - if bm_ref in bmesh_mapping: - del bmesh_mapping[bm_ref] - bmesh_mapping[bm_ref] = bm - - -def clear_bmm(bm_ref='ALL'): - global bmesh_mapping - if bm_ref == 'ALL': - for i in bmesh_mapping.items: - del bmesh_mapping[i] - else: - if read_bmm(bm_ref) is not None: - del bmesh_mapping[bm_ref] 
##################################################### ################### cache magic ##################### ##################################################### +#handle for object in node +temp_handle = {} def handle_delete(handle): if handle in temp_handle: @@ -183,9 +71,12 @@ def handle_check(handle, prop): ##################################################### -# creates an infinite iterator -# use with terminating input def repeat_last(lst): + """ + creates an infinite iterator the first each element in lst + and then keep repeating the last element, + use with terminating input + """ i = -1 while lst: i += 1 @@ -195,8 +86,10 @@ def repeat_last(lst): yield lst[-1] -# longest list matching [[1,2,3,4,5], [10,11]] -> [[1,2,3,4,5], [10,11,11,11,11]] def match_long_repeat(lsts): + """return matched list, using the last value to fill lists as needed + longest list matching [[1,2,3,4,5], [10,11]] -> [[1,2,3,4,5], [10,11,11,11,11]] + """ max_l = 0 tmp = [] for l in lsts: @@ -209,8 +102,10 @@ def match_long_repeat(lsts): return list(map(list, zip(*zip(*tmp)))) -# longest list matching, cycle [[1,2,3,4,5] ,[10,11]] -> [[1,2,3,4,5] ,[10,11,10,11,10]] def match_long_cycle(lsts): + """return matched list, cycling the shorter lists + longest list matching, cycle [[1,2,3,4,5] ,[10,11]] -> [[1,2,3,4,5] ,[10,11,10,11,10]] + """ max_l = 0 tmp = [] for l in lsts: @@ -226,35 +121,37 @@ def match_long_cycle(lsts): # when you intent to use lenght of first list to control WHILE loop duration # and you do not want to change the length of the first list, but you want the second list # lenght to by not less than the length of the first -def second_as_first_cycle(F,S): - if len(F)>len(S): +def second_as_first_cycle(F, S): + if len(F) > len(S): return list(map(list, zip(*zip(*[F, itertools.cycle(S)])))) else: - return [F,S] + return [F, S] - -# cross matching -# [[1,2], [5,6,7]] -> [[1,1,1,2,2,2], [5,6,7,5,6,7]] def match_cross(lsts): + """ return cross matched lists + [[1,2], [5,6,7]] 
-> [[1,1,1,2,2,2], [5,6,7,5,6,7]] + """ return list(map(list, zip(*itertools.product(*lsts)))) -# use this one -# cross matching 2, more useful order -# [[1,2], [5,6,7]] ->[[1, 2, 1, 2, 1, 2], [5, 5, 6, 6, 7, 7]] -# but longer and less elegant expression -# performance difference is minimal since number of lists is usually small def match_cross2(lsts): + """ return cross matched lists + [[1,2], [5,6,7]] ->[[1, 2, 1, 2, 1, 2], [5, 5, 6, 6, 7, 7]] + """ return list(reversed(list(map(list, zip(*itertools.product(*reversed(lsts))))))) # Shortest list decides output length [[1,2,3,4,5], [10,11]] -> [[1,2], [10, 11]] def match_short(lsts): + """return lists of equal length using the Shortest list to decides length + Shortest list decides output length [[1,2,3,4,5], [10,11]] -> [[1,2], [10, 11]] + """ return list(map(list, zip(*zip(*lsts)))) -# extends list so len(l) == count def fullList(l, count): + """extends list l so len is at least count if needed with the + last element of l""" d = count - len(l) if d > 0: l.extend([l[-1] for a in range(d)]) @@ -262,9 +159,11 @@ def fullList(l, count): def sv_zip(*iterables): - # zip('ABCD', 'xy') --> Ax By - # like standard zip but list instead of tuple + """zip('ABCD', 'xy') --> Ax By + like standard zip but list instead of tuple + """ iterators = [iter(it) for it in iterables] + sentinel = object() # use internal sentinel while iterators: result = [] for it in iterators: @@ -274,167 +173,6 @@ def sv_zip(*iterables): result.append(elem) yield result -def checking_links(process): - '''Decorator for process method of node. - This decorator does stanard checks for mandatory input and output links. - ''' - - def real_process(node): - # check_mandatory_links() node method should return True - # if all mandatory inputs and outputs are linked. - # If it returns False then node will just skip processing. 
- if hasattr(node, "check_mandatory_links"): - if not node.check_mandatory_links(): - return - else: - # If check_mandatory_links() method is not defined, then node can - # define list of mandatory inputs and/or outputs. - # Node will skip processing if any of mandatory inputs is not linked. - # It will also skip processing if none of mandatory outputs is linked. - if hasattr(node, "input_descriptors"): - mandatory_inputs = [descriptor.name for descriptor in node.input_descriptors if descriptor.is_mandatory] - if not all([node.inputs[name].is_linked for name in mandatory_inputs]): - print("Node {}: skip processing: not all of mandatory inputs {} are linked.".format(node.name, mandatory_inputs)) - return - if hasattr(node, "output_descriptors"): - mandatory_outputs = [descriptor.name for descriptor in node.output_descriptors if descriptor.is_mandatory] - if not any([node.outputs[name].is_linked for name in mandatory_outputs]): - print("Node {}: skip processing: none of mandatory outputs {} are linked.".format(node.name, mandatory_outputs)) - return - - return process(node) - - real_process.__name__ = process.__name__ - real_process.__doc__ = process.__doc__ - return real_process - -def iterate_process(method, matcher, *inputs, node=None): - '''Shortcut function for usual iteration over set of input lists. 
- - This is shortcut for boilerplate code like - - res1 = [] - res2 = [] - params = match_long_repeat([input1,input2]) - for i1, i2 in zip(*params): - r1,r2 = self.method(i1,i2) - res1.append(r1) - res2.append(r2) - return res1, res2 - ''' - - data = matcher(inputs) - if node is None: - results = [list(method(*d)) for d in zip(*data)] - else: - results = [list(method(node, *d)) for d in zip(*data)] - return list(zip(*results)) - -class Input(object): - '''Node input socket metainformation descriptor.''' - - def __init__(self, socktype, name, identifier=None, is_mandatory=True, default=sentinel, deepcopy=True): - self.socktype = socktype - self.name = name - self.identifier = identifier if identifier is not None else name - self.default = default - self.deepcopy = deepcopy - self.is_mandatory = is_mandatory - - def __str__(self): - return self.name - - def create(self, node): - return node.inputs.new(self.socktype, self.name, self.identifier) - - def get(self, node): - return node.inputs[self.name].sv_get(default=self.default, deepcopy=self.deepcopy) - -class Output(object): - '''Node output socket metainformation descriptor.''' - - def __init__(self, socktype, name, is_mandatory=True): - self.socktype = socktype - self.name = name - self.is_mandatory = is_mandatory - - def __str__(self): - return self.name - - def create(self, node): - node.outputs.new(self.socktype, self.name) - - def set(self, node, value): - if node.outputs[self.name].is_linked: - node.outputs[self.name].sv_set(value) - -def match_inputs(matcher, inputs, outputs): - '''Decorator for inputs/outputs boilerplate. - - Usage: - - @match_inputs(match_long_repeat, - inputs=[Input(...), Input(...)], - outputs=[Output(...), Output(...)]) - def process(self, i1, i2): - ... - return res1, res2 - - This is shortcut for code like - - def process(self): - i1s = self.inputs['i1'].sv_get(..) - i2s = self.inputs['i2'].sv_get(..) 
- res1 = [] - res2 = [] - - params = match_long_repeat([i1s, i2s]) - for i1,i2 in zip(*params): - ... - res1.append(r1) - res2.append(r2) - - if self.outputs['r1'].is_linked: - self.outputs['r1'].sv_set(res1) - if self.outputs['r2'].is_linked: - self.outputs['r2'].sv_set(res2) - ''' - - def decorator(process): - def real_process(node): - inputs_data = [input_descriptor.get(node) for input_descriptor in inputs] - results = iterate_process(process, matcher, *inputs_data, node=node) - for result, output_descriptor in zip(results, outputs): - output_descriptor.set(node, result) - - real_process.__name__ = process.__name__ - real_process.__doc__ = process.__doc__ - - return real_process - - return decorator - -def std_links_processing(matcher): - '''Shortcut decorator for "standard" inputs/outputs sockets processing routine. - - This is shortcut for combination of @checking_links and @match_inputs. - Inputs and outputs descriptors are taken from node.input_descriptors and - node.output_descriptors correspondingly. 
- ''' - - def decorator(process): - def real_process(node): - nonlocal process - process = match_inputs(matcher, node.input_descriptors, node.output_descriptors)(process) - process = checking_links(process) - return process(node) - - real_process.__name__ = process.__name__ - real_process.__doc__ = process.__doc__ - - return real_process - - return decorator ##################################################### ################# list levels magic ################# @@ -442,9 +180,11 @@ def std_links_processing(matcher): # working with nesting levels # define data floor +# NOTE, these function cannot possibly work in all scenarios, use with care -# data from nasting to standart: TO container( objects( lists( floats, ), ), ) def dataCorrect(data, nominal_dept=2): + """data from nasting to standart: TO container( objects( lists( floats, ), ), ) + """ dept = levelsOflist(data) output = [] if not dept: # for empty lists @@ -456,8 +196,10 @@ def dataCorrect(data, nominal_dept=2): return output -# from standart data to initial levels: to nasting lists container( objects( lists( nasty_lists( floats, ), ), ), ) это невозможно! def dataSpoil(data, dept): + """from standart data to initial levels: to nested lists + container( objects( lists( nested_lists( floats, ), ), ), ) это невозможно! 
+ """ if dept: out = [] for d in data: @@ -467,8 +209,8 @@ def dataSpoil(data, dept): return out -# data from nasting to standart: TO container( objects( lists( floats, ), ), ) def dataStandart(data, dept, nominal_dept): + """data from nasting to standart: TO container( objects( lists( floats, ), ), )""" deptl = dept - 1 output = [] for object in data: @@ -480,8 +222,8 @@ def dataStandart(data, dept, nominal_dept): return output -# calc list nesting only in countainment level integer def levelsOflist(lst): + """calc list nesting only in countainment level integer""" level = 1 for n in lst: if n and isinstance(n, (list, tuple)): @@ -500,7 +242,7 @@ def levelsOflist(lst): def Matrix_listing(prop): - # matrix degenerate + """Convert Matrix() into Sverchok data""" mat_out = [] for i, matrix in enumerate(prop): unit = [] @@ -512,6 +254,7 @@ def Matrix_listing(prop): def Matrix_generate(prop): + """Generate Matrix() data from Sverchok data""" mat_out = [] for i, matrix in enumerate(prop): unit = Matrix() @@ -523,6 +266,7 @@ def Matrix_generate(prop): def Matrix_location(prop, list=False): + """return a list of locations represeting the translation of the matrices""" Vectors = [] for p in prop: if list: @@ -533,6 +277,7 @@ def Matrix_location(prop, list=False): def Matrix_scale(prop, list=False): + """return a Vector()/list represeting the scale factor of the matrices""" Vectors = [] for p in prop: if list: @@ -542,9 +287,10 @@ def Matrix_scale(prop, list=False): return [Vectors] -# returns (Vector, rotation) utility function for Matrix Destructor. if list is true -# the Vector is decomposed into tuple format. def Matrix_rotation(prop, list=False): + """return (Vector, rotation) utility function for Matrix Destructor. + if list is true the Vector() is decomposed into tuple format. 
+ """ Vectors = [] for p in prop: q = p.to_quaternion() @@ -557,10 +303,12 @@ def Matrix_rotation(prop, list=False): def Vector_generate(prop): + """return a list of Vector() objects from a standard Sverchok data""" return [[Vector(v) for v in obj] for obj in prop] def Vector_degenerate(prop): + """return a simple list of values instead of Vector() objects""" return [[v[0:3] for v in obj] for obj in prop] @@ -617,236 +365,34 @@ def matrixdef(orig, loc, scale, rot, angle, vec_angle=[[]]): return modif -##################################################### -#################### lists magic #################### -##################################################### - - -def create_list(x, y): - if type(y) in [list, tuple]: - return reduce(create_list, y, x) - else: - return x.append(y) or x - +#### +#### random stuff +#### def enum_item(s): + """return a list usable in enum property from a list with one value""" s = [(i,i,"") for i in s] return s -def preobrazovatel(list_a, levels, level2=1): - list_tmp = [] - level = levels[0] - - if level > level2: - if type(list_a)in [list, tuple]: - for l in list_a: - if type(l) in [list, tuple]: - tmp = preobrazovatel(l, levels, level2+1) - if type(tmp) in [list, tuple]: - list_tmp.extend(tmp) - else: - list_tmp.append(tmp) - else: - list_tmp.append(l) - - elif level == level2: - if type(list_a) in [list, tuple]: - for l in list_a: - if len(levels) == 1: - tmp = preobrazovatel(l, levels, level2+1) - else: - tmp = preobrazovatel(l, levels[1:], level2+1) - list_tmp.append(tmp if tmp else l) - - else: - if type(list_a) in [list, tuple]: - list_tmp = reduce(create_list, list_a, []) - - return list_tmp - - -def myZip(list_all, level, level2=0): - if level == level2: - if type(list_all) in [list, tuple]: - list_lens = [] - list_res = [] - for l in list_all: - if type(l) in [list, tuple]: - list_lens.append(len(l)) - else: - list_lens.append(0) - if list_lens == []: - return False - min_len = min(list_lens) - for value in 
range(min_len): - lt = [] - for l in list_all: - lt.append(l[value]) - t = list(lt) - list_res.append(t) - return list_res - else: - return False - elif level > level2: - if type(list_all) in [list, tuple]: - list_res = [] - list_tr = myZip(list_all, level, level2+1) - if list_tr is False: - list_tr = list_all - t = [] - for tr in list_tr: - if type(list_tr) in [list, tuple]: - list_tl = myZip(tr, level, level2+1) - if list_tl is False: - list_tl = list_tr - t.extend(list_tl) - list_res.append(list(t)) - return list_res - else: - return False - - -##################################################### -################### update List join magic ########## -##################################################### - - -def myZip_2(list_all, level, level2=1): - def create_listDown(list_all, level): - def subDown(list_a, level): - list_b = [] - for l2 in list_a: - if type(l2) in [list, tuple]: - list_b.extend(l2) - else: - list_b.append(l2) - if level > 1: - list_b = subDown(list_b, level-1) - return list_b - - list_tmp = [] - if type(list_all) in [list, tuple]: - for l in list_all: - list_b = subDown(l, level-1) - list_tmp.append(list_b) - else: - list_tmp = list_all - return list_tmp - - list_tmp = list_all.copy() - for x in range(level-1): - list_tmp = create_listDown(list_tmp, level) - - list_r = [] - l_min = [] - - for el in list_tmp: - if type(el) not in [list, tuple]: - break - - l_min.append(len(el)) - - if l_min == []: - l_min = [0] - lm = min(l_min) - for elm in range(lm): - for el in list_tmp: - list_r.append(el[elm]) - - list_tmp = list_r - - for lev in range(level-1): - list_tmp = [list_tmp] - - return list_tmp - - -def joiner(list_all, level, level2=1): - list_tmp = [] - - if level > level2: - if type(list_all) in [list, tuple]: - for list_a in list_all: - if type(list_a) in [list, tuple]: - list_tmp.extend(list_a) - else: - list_tmp.append(list_a) - else: - list_tmp = list_all - - list_res = joiner(list_tmp, level, level2=level2+1) - list_tmp = [list_res] - 
- if level == level2: - if type(list_all) in [list, tuple]: - for list_a in list_all: - if type(list_a) in [list, tuple]: - list_tmp.extend(list_a) - else: - list_tmp.append(list_a) - else: - list_tmp.append(list_all) - - if level < level2: - if type(list_all) in [list, tuple]: - for l in list_all: - list_tmp.append(l) - else: - list_tmp.append(l) - - return list_tmp - - -def wrapper_2(l_etalon, list_a, level): - def subWrap(list_a, level, count): - list_b = [] - if level == 1: - if len(list_a) == count: - for l in list_a: - list_b.append([l]) - else: - dc = len(list_a)//count - for l in range(count): - list_c = [] - for j in range(dc): - list_c.append(list_a[l*dc+j]) - list_b.append(list_c) - else: - for l in list_a: - list_b = subWrap(l, level-1, count) - return list_b - - def subWrap_2(l_etalon, len_l, level): - len_r = len_l - if type(l_etalon) in [list, tuple]: - len_r = len(l_etalon) * len_l - if level > 1: - len_r = subWrap_2(l_etalon[0], len_r, level-1) - - return len_r - - len_l = len(l_etalon) - lens_l = subWrap_2(l_etalon, 1, level) - list_tmp = subWrap(list_a, level, lens_l) - - for l in range(level-1): - list_tmp = [list_tmp] - return list_tmp - - ##################################################### ############### debug settings magic ################ ##################################################### def sverchok_debug(mode): + """ + set debug mode to mode + """ global DEBUG_MODE DEBUG_MODE = mode return DEBUG_MODE def setup_init(): + """ + setup variables needed for sverchok to function + """ global DEBUG_MODE global HEAT_MAP global SVERCHOK_NAME @@ -866,6 +412,9 @@ def setup_init(): def heat_map_state(state): + """ + colors the nodes based on execution time + """ global HEAT_MAP HEAT_MAP = state sv_ng = [ng for ng in bpy.data.node_groups if ng.bl_idname == 'SverchCustomTreeType'] @@ -894,7 +443,6 @@ def heat_map_state(state): def updateNode(self, context): """ - Old, use process_node instead When a node has changed state and need to call a 
partial update. For example a user exposed bpy.prop """ @@ -904,7 +452,7 @@ def updateNode(self, context): ############################################################## ############## changable type of socket magic ################ ########### if you have separate socket solution ############# -#################### wellcome to provide ##################### +#################### welcome to provide ##################### ############################################################## ############################################################## @@ -935,14 +483,10 @@ def changable_sockets(node, inputsocketname, outputsocketname): ng.links.new(to_socket, new_out_socket) node.id_data.unfreeze(hard=True) -def get_socket_type_full(node, inputsocketname): - socket = node.inputs[inputsocketname] - other = get_other_socket(socket) - return other.links[0].from_socket.bl_idname def replace_socket(socket, new_type, new_name=None, new_pos=None): ''' - Replace a socket and keep links + Replace a socket with a socket of new_type and keep links ''' if new_name is None: new_name = socket.name @@ -1075,135 +619,198 @@ def multi_socket(node, min=1, start=0, breck=False, out_count=None): node.outputs.remove(node.outputs[-1]) ng.unfreeze(True) + ##################################### -# node and socket id functions # +# socket data cache # ##################################### -# socket.name is not unique... identifier is -def socket_id(socket): - #return hash(socket) - return str(hash(socket.id_data.name + socket.node.name + socket.identifier))+socket.node.name+socket.name +def SvGetSocketAnyType(self, socket, default=None, deepcopy=True): + """Old interface, don't use""" + return socket.sv_get(default, deepcopy) + + +def SvSetSocketAnyType(self, socket_name, out): + """Old interface, don't use""" + + self.outputs[socket_name].sv_set(out) -# For when need a key for use with dict in node -# create a string property like this. 
-# n_id = StringProperty(default='') -# And a copy function -# def copy(self,node) -# self.n_id='' -# the always use like this -# n_id = node_id(self) -# node_dict[n_id]['key'] +def socket_id(socket): + """return an usable and semi stable hash""" + return socket.socket_id def node_id(node): - if not node.n_id: - node.n_id = str(hash(node) ^ hash(time.monotonic())) - return node.n_id + """return a stable hash for the lifetime of the node + needs StringProperty called n_id in the node + """ + return node.node_id -##################################### -# socket data cache # -##################################### +############### +# decorators! +############### +# not used but kept... -def SvGetSocketAnyType(self, socket, default=None, deepcopy=True): - out = SvGetSocket(socket, deepcopy) - if socket.is_linked: - return SvGetSocket(socket, deepcopy) - elif default: - return default +def checking_links(process): + '''Decorator for process method of node. + This decorator does stanard checks for mandatory input and output links. + ''' + + def real_process(node): + # check_mandatory_links() node method should return True + # if all mandatory inputs and outputs are linked. + # If it returns False then node will just skip processing. + if hasattr(node, "check_mandatory_links"): + if not node.check_mandatory_links(): + return + else: + # If check_mandatory_links() method is not defined, then node can + # define list of mandatory inputs and/or outputs. + # Node will skip processing if any of mandatory inputs is not linked. + # It will also skip processing if none of mandatory outputs is linked. 
+ if hasattr(node, "input_descriptors"): + mandatory_inputs = [descriptor.name for descriptor in node.input_descriptors if descriptor.is_mandatory] + if not all([node.inputs[name].is_linked for name in mandatory_inputs]): + print("Node {}: skip processing: not all of mandatory inputs {} are linked.".format(node.name, mandatory_inputs)) + return + if hasattr(node, "output_descriptors"): + mandatory_outputs = [descriptor.name for descriptor in node.output_descriptors if descriptor.is_mandatory] + if not any([node.outputs[name].is_linked for name in mandatory_outputs]): + print("Node {}: skip processing: none of mandatory outputs {} are linked.".format(node.name, mandatory_outputs)) + return + + return process(node) + + real_process.__name__ = process.__name__ + real_process.__doc__ = process.__doc__ + return real_process + +def iterate_process(method, matcher, *inputs, node=None): + '''Shortcut function for usual iteration over set of input lists. + + This is shortcut for boilerplate code like + + res1 = [] + res2 = [] + params = match_long_repeat([input1,input2]) + for i1, i2 in zip(*params): + r1,r2 = self.method(i1,i2) + res1.append(r1) + res2.append(r2) + return res1, res2 + ''' + + data = matcher(inputs) + if node is None: + results = [list(method(*d)) for d in zip(*data)] else: - raise LookupError + results = [list(method(node, *d)) for d in zip(*data)] + return list(zip(*results)) +class Input(object): + '''Node input socket metainformation descriptor.''' -def SvSetSocketAnyType(self, socket_name, out): - SvSetSocket(self.outputs[socket_name], out) + def __init__(self, socktype, name, identifier=None, is_mandatory=True, default=sentinel, deepcopy=True): + self.socktype = socktype + self.name = name + self.identifier = identifier if identifier is not None else name + self.default = default + self.deepcopy = deepcopy + self.is_mandatory = is_mandatory -# faster than builtin deep copy for us. 
-# useful for our limited case -# we should be able to specify vectors here to get them create -# or stop destroying them when in vector socket. + def __str__(self): + return self.name + def create(self, node): + return node.inputs.new(self.socktype, self.name, self.identifier) -def sv_deep_copy(lst): - if isinstance(lst, (list, tuple)): - if lst and not isinstance(lst[0], (list, tuple)): - return lst[:] - return [sv_deep_copy(l) for l in lst] - return lst + def get(self, node): + return node.inputs[self.name].sv_get(default=self.default, deepcopy=self.deepcopy) +class Output(object): + '''Node output socket metainformation descriptor.''' -# Build string for showing in socket label -def SvGetSocketInfo(socket): + def __init__(self, socktype, name, is_mandatory=True): + self.socktype = socktype + self.name = name + self.is_mandatory = is_mandatory - global socket_data_cache - ng = socket.id_data.name + def __str__(self): + return self.name - if socket.is_output: - s_id = socket_id(socket) - elif socket.links: - s_id = socket_id(get_other_socket(socket)) - else: - return '' - if ng in socket_data_cache: - if s_id in socket_data_cache[ng]: - data = socket_data_cache[ng][s_id] - if data: - return str(len(data)) - return '' + def create(self, node): + node.outputs.new(self.socktype, self.name) + def set(self, node, value): + if node.outputs[self.name].is_linked: + node.outputs[self.name].sv_set(value) -def SvSetSocket(socket, out): - global socket_data_cache - if not socket.is_output: - print("Warning, {} setting input socket: {}".format(socket.node.name, socket.name)) - if not socket.is_linked: - print("Warning: {} setting unconncted socket: {}".format(socket.node.name, socket.name)) - s_id = socket_id(socket) - s_ng = socket.id_data.name - if s_ng not in socket_data_cache: - socket_data_cache[s_ng] = {} - socket_data_cache[s_ng][s_id] = out +def match_inputs(matcher, inputs, outputs): + '''Decorator for inputs/outputs boilerplate. 
+ Usage: -def SvGetSocket(socket, deepcopy=True): - global socket_data_cache - global DEBUG_MODE - if socket.is_linked: - other = get_other_socket(socket) - s_id = socket_id(other) - s_ng = other.id_data.name - if s_ng not in socket_data_cache: - raise LookupError - if s_id in socket_data_cache[s_ng]: - out = socket_data_cache[s_ng][s_id] - if deepcopy: - return sv_deep_copy(out) - else: - return out - else: - if DEBUG_MODE: - print("cache miss:", socket.node.name, "->", socket.name, "from:", other.node.name, "->", other.name) - raise SvNoDataError - # not linked - raise SvNoDataError + @match_inputs(match_long_repeat, + inputs=[Input(...), Input(...)], + outputs=[Output(...), Output(...)]) + def process(self, i1, i2): + ... + return res1, res2 -class SvNoDataError(LookupError): - pass + This is shortcut for code like -def reset_socket_cache(ng): - """ - Reset socket cache either for node group. - """ - global socket_data_cache - socket_data_cache[ng.name] = {} + def process(self): + i1s = self.inputs['i1'].sv_get(..) + i2s = self.inputs['i2'].sv_get(..) + res1 = [] + res2 = [] + + params = match_long_repeat([i1s, i2s]) + for i1,i2 in zip(*params): + ... 
+ res1.append(r1) + res2.append(r2) + if self.outputs['r1'].is_linked: + self.outputs['r1'].sv_set(res1) + if self.outputs['r2'].is_linked: + self.outputs['r2'].sv_set(res2) + ''' -#################################### -# быстрый сортировщик / quick sorter -#################################### + def decorator(process): + def real_process(node): + inputs_data = [input_descriptor.get(node) for input_descriptor in inputs] + results = iterate_process(process, matcher, *inputs_data, node=node) + for result, output_descriptor in zip(results, outputs): + output_descriptor.set(node, result) -def svQsort(L): - if L: return svQsort([x for x in L[1:] if x<L[0]])+[L[0]]+svQsort([x for x in L[1:] if x>=L[0]]) - return [] + real_process.__name__ = process.__name__ + real_process.__doc__ = process.__doc__ + + return real_process + + return decorator + +def std_links_processing(matcher): + '''Shortcut decorator for "standard" inputs/outputs sockets processing routine. + + This is shortcut for combination of @checking_links and @match_inputs. + Inputs and outputs descriptors are taken from node.input_descriptors and + node.output_descriptors correspondingly. 
+ ''' + + def decorator(process): + def real_process(node): + nonlocal process + process = match_inputs(matcher, node.input_descriptors, node.output_descriptors)(process) + process = checking_links(process) + return process(node) + + real_process.__name__ = process.__name__ + real_process.__doc__ = process.__doc__ + + return real_process + + return decorator diff --git a/node_tree.py b/node_tree.py index 2ee1ffc52751177751cd6d10d4da2a81fe053c4e..4bc1105af9c66acd9407e0fb93c5723a2c77ebeb 100644 --- a/node_tree.py +++ b/node_tree.py @@ -26,10 +26,15 @@ from bpy.types import NodeTree, NodeSocket, NodeSocketStandard from sverchok import data_structure from sverchok.data_structure import ( + updateNode, + get_other_socket, + socket_id, + replace_socket) + +from sverchok.core.socket_data import ( SvGetSocketInfo, SvGetSocket, SvSetSocket, - updateNode, SvNoDataError, sentinel) @@ -48,6 +53,7 @@ from sverchok.core.socket_conversions import ( from sverchok.ui import color_def def process_from_socket(self, context): + """Update function of exposed properties in Sockets""" self.node.process_node(context) @@ -69,9 +75,14 @@ class SvSocketCommon: if self.prop_name: setattr(self.node, self.prop_name, value) + @property + def socket_id(self): + """Id of socket used by data_cache""" + return str(hash(self.id_data.name + self.node.name + self.identifier)) @property def index(self): + """Index of socket""" node = self.node sockets = node.outputs if self.is_output else node.inputs for i, s in enumerate(sockets): @@ -79,11 +90,16 @@ class SvSocketCommon: return i def sv_set(self, data): + """Set output data""" SvSetSocket(self, data) + def replace_socket(self, new_type, new_name=None): + """Replace a socket with a socket of new_type and keep links, + return the new socket, the old reference might be invalid""" + return replace_socket(self, new_type, new_name) + class MatrixSocket(NodeSocket, SvSocketCommon): '''4x4 matrix Socket type''' - # ref: 
http://urchn.org/post/nodal-transform-experiment bl_idname = "MatrixSocket" bl_label = "Matrix Socket" prop_name = StringProperty(default='') @@ -95,9 +111,9 @@ class MatrixSocket(NodeSocket, SvSocketCommon): def sv_get(self, default=sentinel, deepcopy=True): self.num_matrices = 0 if self.is_linked and not self.is_output: - + if is_vector_to_matrix(self): - # this means we're going to get a flat list of the incoming + # this means we're going to get a flat list of the incoming # locations and convert those into matrices proper. out = get_matrices_from_locs(SvGetSocket(self, deepcopy=True)) self.num_matrices = len(out) @@ -149,7 +165,7 @@ class VerticesSocket(NodeSocket, SvSocketCommon): if is_matrix_to_vector(self): out = get_locs_from_matrices(SvGetSocket(self, deepcopy=True)) return out - + return SvGetSocket(self, deepcopy) @@ -382,6 +398,11 @@ class SverchCustomTreeNode: def poll(cls, ntree): return ntree.bl_idname in ['SverchCustomTreeType', 'SverchGroupTreeType'] + @property + def node_id(self): + if not self.n_id: + self.n_id = str(hash(self) ^ hash(time.monotonic())) + return self.n_id def mark_error(self, err): """ diff --git a/nodes/list_main/decompose.py b/nodes/list_main/decompose.py index d275b75ba6ee3f712e1901a69c2fb854d3051c16..33fc22e431b720c65b069f29765d36e8514afdad 100644 --- a/nodes/list_main/decompose.py +++ b/nodes/list_main/decompose.py @@ -21,8 +21,7 @@ from bpy.props import BoolProperty, IntProperty, StringProperty from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import (levelsOflist, multi_socket, changable_sockets, - get_socket_type_full, SvSetSocket, SvGetSocketAnyType, - updateNode, get_other_socket) + updateNode, get_other_socket) from sverchok.core import update_system @@ -35,30 +34,30 @@ class SvListDecomposeNode(bpy.types.Node, SverchCustomTreeNode): # two veriables for multi socket input base_name = StringProperty(default='data') multi_socket_type = StringProperty(default='StringsSocket') - + def 
auto_count(self): data = self.inputs['data'].sv_get(default="not found") other = get_other_socket(self.inputs['data']) if other and data == "not found": update_system.process_to_node(other.node) data = self.inputs['data'].sv_get() - + leve = levelsOflist(data) if leve+1 < self.level: self.level = leve+1 result = self.beat(data, self.level) self.count = min(len(result), 16) - + def set_count(self, context): other = get_other_socket(self.inputs[0]) if not other: return self.multi_socket_type = other.bl_idname multi_socket(self, min=1, start=0, breck=True, out_count=self.count) - + level = IntProperty(name='level', default=1, min=1, update=updateNode) - + count = IntProperty(name='Count', default=1, min=1, max=16, update=set_count) @@ -69,9 +68,9 @@ class SvListDecomposeNode(bpy.types.Node, SverchCustomTreeNode): row.prop(self, 'count') op = row.operator("node.sverchok_text_callback",text="Auto set") op.fn_name="auto_count" - + def sv_init(self, context): - self.inputs.new('StringsSocket', "data") + self.inputs.new('StringsSocket', "data") self.outputs.new('StringsSocket', "data[0]") @@ -84,9 +83,9 @@ class SvListDecomposeNode(bpy.types.Node, SverchCustomTreeNode): outputsocketname = [name.name for name in self.outputs] changable_sockets(self, 'data', outputsocketname) - + def process(self): - data = SvGetSocketAnyType(self, self.inputs['data']) + data = self.inputs['data'].sv_get() result = self.beat(data, self.level) for out, socket in zip(result, self.outputs[:30]): if socket.is_linked: diff --git a/nodes/list_main/join.py b/nodes/list_main/join.py index 9b623f3a852676b34373cbbe42a4f10eebb7048c..a16432a4bbb995614ac8bc70ec982521e14ecc0a 100644 --- a/nodes/list_main/join.py +++ b/nodes/list_main/join.py @@ -19,10 +19,10 @@ import bpy from bpy.props import BoolProperty, IntProperty, StringProperty from sverchok.node_tree import SverchCustomTreeNode -from sverchok.data_structure import (changable_sockets, joiner, multi_socket, - myZip_2, wrapper_2, updateNode, +from 
sverchok.data_structure import (changable_sockets, multi_socket, updateNode, SvGetSocketAnyType, SvSetSocketAnyType) +from sverchok.utils.listutils import joiner, myZip_2, wrapper_2 class ListJoinNode(bpy.types.Node, SverchCustomTreeNode): ''' ListJoin node ''' @@ -70,7 +70,7 @@ class ListJoinNode(bpy.types.Node, SverchCustomTreeNode): slots = [] for socket in self.inputs: if socket.is_linked: - slots.append(SvGetSocketAnyType(self, socket)) + slots.append(socket.sv_get()) if len(slots) == 0: return @@ -88,7 +88,7 @@ class ListJoinNode(bpy.types.Node, SverchCustomTreeNode): list_wrap_mix = wrapper_2(slots, list_mix, self.JoinLevel) result = list_wrap_mix.copy() - SvSetSocketAnyType(self, 'data', result) + self.outputs[0].sv_set(result) def register(): bpy.utils.register_class(ListJoinNode) diff --git a/nodes/list_main/levels.py b/nodes/list_main/levels.py index f41ca4b7ee971212f7d3459b3c654806bf6c52a6..dfff3c489f6b2a98481eb546c31155abb3101dbb 100644 --- a/nodes/list_main/levels.py +++ b/nodes/list_main/levels.py @@ -21,9 +21,11 @@ from ast import literal_eval import bpy from bpy.props import BoolProperty, StringProperty from sverchok.node_tree import SverchCustomTreeNode -from sverchok.data_structure import (changable_sockets, preobrazovatel, updateNode, +from sverchok.data_structure import (changable_sockets, updateNode, SvSetSocketAnyType, SvGetSocketAnyType) +from sverchok.utils.listutils import preobrazovatel + class ListLevelsNode(bpy.types.Node, SverchCustomTreeNode): ''' Lists Levels node ''' @@ -52,7 +54,7 @@ class ListLevelsNode(bpy.types.Node, SverchCustomTreeNode): inputsocketname = 'data' outputsocketname = ['data', ] changable_sockets(self, inputsocketname, outputsocketname) - + def process(self): if self.outputs['data'].is_linked: data = SvGetSocketAnyType(self, self.inputs['data']) diff --git a/nodes/list_main/zip.py b/nodes/list_main/zip.py index 26af720fde317f8f31c027dacf3e47690e802aa0..3e5789950f3a769cad7fa3424e4d84f20990a79d 100644 --- 
a/nodes/list_main/zip.py +++ b/nodes/list_main/zip.py @@ -20,8 +20,8 @@ import bpy from bpy.props import BoolProperty, IntProperty, StringProperty from sverchok.node_tree import SverchCustomTreeNode -from sverchok.data_structure import (changable_sockets, multi_socket, preobrazovatel, - SvSetSocketAnyType, SvGetSocketAnyType, updateNode) +from sverchok.data_structure import changable_sockets, multi_socket, updateNode +from sverchok.utils.listutils import preobrazovatel class ZipNode(bpy.types.Node, SverchCustomTreeNode): @@ -33,14 +33,14 @@ class ZipNode(bpy.types.Node, SverchCustomTreeNode): level = IntProperty(name='level', default=1, min=1, update=updateNode) typ = StringProperty(name='typ', default='') newsock = BoolProperty(name='newsock', default=False) - + unwrap = BoolProperty( name='unwrap', description='unwrap objects?', default=False, update=updateNode ) - + base_name = 'data ' multi_socket_type = 'StringsSocket' @@ -68,13 +68,13 @@ class ZipNode(bpy.types.Node, SverchCustomTreeNode): slots = [] for socket in self.inputs: if socket.is_linked: - slots.append(SvGetSocketAnyType(self, socket)) + slots.append(socket.sv_get()) if len(slots) < 2: return output = self.myZip(slots, self.level) if self.unwrap: output = preobrazovatel(output, [2, 3]) - SvSetSocketAnyType(self, 'data', output) + self.outputs[0].sv_set(output) def myZip(self, list_all, level, level2=0): if level == level2: diff --git a/nodes/list_struct/sort_mk2.py b/nodes/list_struct/sort_mk2.py index 75630fa3ed7931b6c848a0ff2bf4833bb7a5aa2f..cc8a62bbf380d34158cfa923ccdde697c7f4e07b 100644 --- a/nodes/list_struct/sort_mk2.py +++ b/nodes/list_struct/sort_mk2.py @@ -21,7 +21,7 @@ from bpy.props import BoolProperty, IntProperty, StringProperty from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import (updateNode, changable_sockets, - dataCorrect, svQsort, levelsOflist) + dataCorrect, levelsOflist) class ListSortNodeMK2(bpy.types.Node, SverchCustomTreeNode): ''' List Sort 
MK2 ''' @@ -62,7 +62,7 @@ class ListSortNodeMK2(bpy.types.Node, SverchCustomTreeNode): out_ = [] if not self.inputs['keys'].is_linked: for obj in data: - out_.append(svQsort(obj)) + out_.append(sorted(obj)) else: keys_ = self.inputs['keys'].sv_get() keys = dataCorrect(keys_, nominal_dept=1) diff --git a/nodes/logic/neuro_elman.py b/nodes/logic/neuro_elman.py index d0a3dc1a421c785969cd8e3c8abe1f48a162607d..d5307043d2881032cfdebee90f3d39e5cb6455fb 100644 --- a/nodes/logic/neuro_elman.py +++ b/nodes/logic/neuro_elman.py @@ -23,7 +23,7 @@ from bpy.props import BoolProperty, IntProperty, StringProperty, FloatProperty from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import (updateNode, changable_sockets, - dataCorrect, svQsort, + dataCorrect, SvSetSocketAnyType, SvGetSocketAnyType) from sverchok.data_structure import handle_read, handle_write @@ -33,8 +33,8 @@ from cmath import exp class SvNeuro_Elman: - - + + def init_w(self, number, ext, treshold): out = [] for n in range(number): @@ -42,22 +42,22 @@ class SvNeuro_Elman: for e in range(ext): tmp.append(uniform(-treshold, treshold)) out.append(tmp) - + return out - - - def sigmoida(self, x, a): + + + def sigmoida(self, x, a): if a==0: b=1 else: b = 1/a return 1/(1+exp(-b*x).real+1e-8) - - def neuro(self, list_in, etalon, maxim, learning, prop): + + def neuro(self, list_in, etalon, maxim, learning, prop): outA = self.layerA(list_in, prop) outB = self.layerB(outA, prop) outC = self.layerC(outB, prop) - + if learning: lin = len(etalon) if linabs(maxim/2): break list_x[idx] = xi - + outB_ = self.layerB(list_x, prop) outC_ = self.layerC(outB, prop) - + prop['wA'] = list_wA prop['wB'] = list_wB - + class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): @@ -174,7 +174,7 @@ class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): bl_idname = 'SvNeuroElman1LNode' bl_label = '*Neuro Elman 1 Layer' bl_icon = 'OUTLINER_OB_EMPTY' - + Elman = SvNeuro_Elman() k_learning = 
FloatProperty(name='k_learning', default=0.1, @@ -186,7 +186,7 @@ class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): maximum = FloatProperty(name='maximum', default=3.0, update=updateNode) - menushka = BoolProperty(name='menushka', + menushka = BoolProperty(name='menushka', default=False) epsilon = FloatProperty(name='epsilon', default=1.0, @@ -202,15 +202,15 @@ class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): lA = IntProperty(name='lA', default=1, min = 0, update=updateNode) lB = IntProperty(name='lB', default=5, min = 0, update=updateNode) lC = IntProperty(name='lC', default=1, min = 0, update=updateNode) - - + + def sv_init(self, context): self.inputs.new('StringsSocket', "data", "data") self.inputs.new('StringsSocket', "etalon", "etalon") self.outputs.new('StringsSocket', "result", "result") - - + + def draw_buttons(self, context, layout): handle_name = self.name + self.id_data.name layout.prop(self, "k_learning", text="koeff learning") @@ -235,8 +235,8 @@ class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): col.prop(self, "k_lambda", text="lambda") col = layout.column(align=True) col.prop(self, "treshold", text="treshold") - - + + def process(self): handle_name = self.name + self.id_data.name handle = handle_read(handle_name) @@ -253,25 +253,25 @@ class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): 'cycles':3, 'trashold':0.01, 'k_lambda':0.0001} - + props['wA'] = self.Elman.init_w(props['InA'], props['InB'], props['trashold']) props['wB'] = self.Elman.init_w(props['InB'], props['InC'], props['trashold']) - - + + self.Elman.gister = abs(self.gisterezis) self.Elman.k_learning = self.k_learning - + result = [] if 'result' in self.outputs and len(self.outputs['result'].links) > 0 \ and 'data' in self.inputs and len(self.inputs['data'].links) > 0: - + if 'etalon' in self.inputs and len(self.inputs['etalon'].links) > 0: etalon = SvGetSocketAnyType(self, self.inputs['etalon'])[0] flag = True else: flag = False etalon 
= [[0]] - + if (props['InA']!=self.lA+1) or props['InB']!=self.lB or \ props['InC']!=self.lC: props['InA'] = self.lA+1 @@ -279,14 +279,14 @@ class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): props['InC'] = self.lC props['wA'] = self.Elman.init_w(props['InA'], props['InB']) props['wB'] = self.Elman.init_w(props['InB'], props['InC']) - + props['gister'] = self.gisterezis props['k_learning'] = self.k_learning props['epsilon'] = self.epsilon props['k_lambda'] = self.k_lambda props['cycles'] = self.cycles props['trashold'] = self.treshold - + data_ = SvGetSocketAnyType(self, self.inputs['data'])[0] if type(etalon[0]) not in [list, tuple]: etalon = [etalon] if type(data_[0]) not in [list, tuple]: data_ = [data_] @@ -296,10 +296,10 @@ class SvNeuroElman1LNode(bpy.types.Node, SverchCustomTreeNode): data2 = [1.0]+data if type(eta) not in [list, tuple]: eta = [eta] result.append([self.Elman.neuro(data2, eta, self.maximum, flag, props)]) - + else: result = [[[]]] - + handle_write(handle_name, props) SvSetSocketAnyType(self, 'result', result) @@ -313,10 +313,10 @@ class SvNeuroOps(bpy.types.Operator): bl_idname = "node.sverchok_neuro" bl_label = "Sverchok Neuro operators" bl_options = {'REGISTER', 'UNDO'} - + typ = IntProperty(name = 'typ', default=0) handle_name = StringProperty(name='handle') - + def execute(self, context): if self.typ == 1: handle = handle_read(self.handle_name) @@ -326,11 +326,11 @@ class SvNeuroOps(bpy.types.Operator): prop['wA']=Elman.init_w(prop['InA'], prop['InB'], prop['trashold']) prop['wB']=Elman.init_w(prop['InB'], prop['InC'], prop['trashold']) handle_write(self.handle_name, prop) - + return {'FINISHED'} - - - + + + @@ -342,4 +342,3 @@ def register(): def unregister(): bpy.utils.unregister_class(SvNeuroElman1LNode) bpy.utils.unregister_class(SvNeuroOps) - diff --git a/nodes/number/formula2.py b/nodes/number/formula2.py index 15be8a5ba945cd7f17cda22ac9025e702ad4d064..1c6a4365262f723f565e65935a74f1bc87b4dc39 100644 --- 
a/nodes/number/formula2.py +++ b/nodes/number/formula2.py @@ -31,7 +31,7 @@ from bpy.props import BoolProperty, StringProperty from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import ( - sv_Vars, updateNode, multi_socket, changable_sockets, + updateNode, multi_socket, changable_sockets, dataSpoil, dataCorrect, levelsOflist, SvSetSocketAnyType, SvGetSocketAnyType ) @@ -72,8 +72,8 @@ class Formula2Node(bpy.types.Node, SverchCustomTreeNode): inputsocketname = 'X' outputsocketname = ['Result'] changable_sockets(self, inputsocketname, outputsocketname) - - + + def process(self): if self.inputs['X'].is_linked: vecs = SvGetSocketAnyType(self, self.inputs['X']) @@ -84,7 +84,7 @@ class Formula2Node(bpy.types.Node, SverchCustomTreeNode): # outputs if not self.outputs['Result'].is_linked: return - + list_mult = [] if self.inputs['n[0]'].is_linked: i = 0 @@ -134,6 +134,7 @@ class Formula2Node(bpy.types.Node, SverchCustomTreeNode): X = x n = [] a = [] + sv_Vars = {} #is dead for long time, but not sure about this list_vars = [w for w in sv_Vars.keys()] for v in list_vars: if v[:6] == 'sv_typ': diff --git a/nodes/viz/viewer_mk2.py b/nodes/viz/viewer_mk2.py index aca778df2aadbe2d6ad1fe1629ef72a740ab9c6d..94d280ceb6134b9b9a95e5518b7440d481564353 100644 --- a/nodes/viz/viewer_mk2.py +++ b/nodes/viz/viewer_mk2.py @@ -27,11 +27,12 @@ import sverchok from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import ( - cache_viewer_baker, node_id, updateNode, dataCorrect, + node_id, updateNode, dataCorrect, Vector_generate, Matrix_generate) from sverchok.ui.viewer_draw_mk2 import callback_disable, callback_enable -# from nodes.basic_view.viewer import SvObjBake + +cache_viewer_baker = {} sock_dict = { 'v': 'VerticesSocket', @@ -370,7 +371,7 @@ class ViewerNode2(bpy.types.Node, SverchCustomTreeNode): # new connections and processing, it could auto rewire s->s v->v m->m. 
def check_origin(to_socket, socket_type): origin_socket_bl_idname = inputs[to_socket].links[0].from_socket.bl_idname - + if isinstance(socket_type, str): return origin_socket_bl_idname == sock_dict.get(socket_type) else: @@ -461,4 +462,4 @@ def unregister(): del bpy.types.Scene.sv_light_direction if __name__ == '__main__': - register() \ No newline at end of file + register() diff --git a/old_nodes/sort.py b/old_nodes/sort.py index 131c23c73ea8873129855d63aede5d5db4fc1c06..d0aa5349cac3076e45ffb20ace701bce7a8e6169 100644 --- a/old_nodes/sort.py +++ b/old_nodes/sort.py @@ -21,7 +21,7 @@ from bpy.props import BoolProperty, IntProperty, StringProperty from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import (updateNode, changable_sockets, - dataCorrect, svQsort) + dataCorrect) class ListSortNode(bpy.types.Node, SverchCustomTreeNode): ''' List Sort ''' @@ -55,12 +55,12 @@ class ListSortNode(bpy.types.Node, SverchCustomTreeNode): def process(self): if not self.outputs['data'].is_linked: return - + data_ = self.inputs['data'].sv_get() data = dataCorrect(data_, nominal_dept=self.level) out_ = [] for obj in data: - out_.append(svQsort(obj)) + out_.append(sorted(obj)) out = dataCorrect(out_) self.outputs['data'].sv_set(out) @@ -71,4 +71,4 @@ def register(): def unregister(): - bpy.utils.unregister_class(ListSortNode) \ No newline at end of file + bpy.utils.unregister_class(ListSortNode) diff --git a/old_nodes/viewer.py b/old_nodes/viewer.py index e616d48fb75c45ab93e947847afdd31d1d8d3ae5..a4888e00711efc5137913a2f656be1f69c4bd5e6 100644 --- a/old_nodes/viewer.py +++ b/old_nodes/viewer.py @@ -22,8 +22,7 @@ from mathutils import Matrix from sverchok.node_tree import (SverchCustomTreeNode, SvColors, StringsSocket, VerticesSocket, MatrixSocket) -from sverchok.data_structure import (cache_viewer_baker, - dataCorrect, node_id, +from sverchok.data_structure import (dataCorrect, node_id, Vector_generate, Matrix_generate, updateNode, SvGetSocketAnyType) 
from sverchok.ui.viewer_draw import callback_disable, callback_enable @@ -126,6 +125,7 @@ class SvObjBake(bpy.types.Operator): #print (ob.name + ' baked') return [ob, me] +cache_viewer_baker = {} class ViewerNode(bpy.types.Node, SverchCustomTreeNode): ''' ViewerNode ''' diff --git a/sockets.py b/sockets.py index 20cf031fc01df211b8fcfd67f166585941a3f341..29d097a03960bca979a638d3cd381790f1d8ee8c 100644 --- a/sockets.py +++ b/sockets.py @@ -22,7 +22,7 @@ from bpy.props import StringProperty, BoolProperty, FloatVectorProperty, IntProp from bpy.types import NodeTree, NodeSocket -from sverchok.data_structure import SvGetSocket, SvGetSocketInfo +from sverchok.core.socket_data import SvGetSocket, SvGetSocketInfo from sverchok.node_tree import SvSocketCommon, process_from_socket, sentinel @@ -141,4 +141,4 @@ def register(): def unregister(): for cls in classes: - bpy.utils.unregister_class(cls) \ No newline at end of file + bpy.utils.unregister_class(cls) diff --git a/utils/listutils.py b/utils/listutils.py new file mode 100644 index 0000000000000000000000000000000000000000..6b2dfbda458c1caa6dee1aae88944f665f7e45a3 --- /dev/null +++ b/utils/listutils.py @@ -0,0 +1,234 @@ +# ##### BEGIN GPL LICENSE BLOCK ##### +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# +# ##### END GPL LICENSE BLOCK ##### + + +from functools import reduce +##################################################### +#################### lists magic #################### +##################################################### + + +def create_list(x, y): + if type(y) in [list, tuple]: + return reduce(create_list, y, x) + else: + return x.append(y) or x + + + + + +def preobrazovatel(list_a, levels, level2=1): + list_tmp = [] + level = levels[0] + + if level > level2: + if type(list_a)in [list, tuple]: + for l in list_a: + if type(l) in [list, tuple]: + tmp = preobrazovatel(l, levels, level2+1) + if type(tmp) in [list, tuple]: + list_tmp.extend(tmp) + else: + list_tmp.append(tmp) + else: + list_tmp.append(l) + + elif level == level2: + if type(list_a) in [list, tuple]: + for l in list_a: + if len(levels) == 1: + tmp = preobrazovatel(l, levels, level2+1) + else: + tmp = preobrazovatel(l, levels[1:], level2+1) + list_tmp.append(tmp if tmp else l) + + else: + if type(list_a) in [list, tuple]: + list_tmp = reduce(create_list, list_a, []) + + return list_tmp + + +def myZip(list_all, level, level2=0): + if level == level2: + if type(list_all) in [list, tuple]: + list_lens = [] + list_res = [] + for l in list_all: + if type(l) in [list, tuple]: + list_lens.append(len(l)) + else: + list_lens.append(0) + if list_lens == []: + return False + min_len = min(list_lens) + for value in range(min_len): + lt = [] + for l in list_all: + lt.append(l[value]) + t = list(lt) + list_res.append(t) + return list_res + else: + return False + elif level > level2: + if type(list_all) in [list, tuple]: + list_res = [] + list_tr = myZip(list_all, level, level2+1) + if list_tr is False: + list_tr = list_all + t = [] + for tr in list_tr: + if type(list_tr) in [list, tuple]: + list_tl = myZip(tr, level, level2+1) + if list_tl is False: + list_tl = list_tr + t.extend(list_tl) + list_res.append(list(t)) + return list_res + else: + return False + + 
+##################################################### +################### update List join magic ########## +##################################################### + + +def myZip_2(list_all, level, level2=1): + def create_listDown(list_all, level): + def subDown(list_a, level): + list_b = [] + for l2 in list_a: + if type(l2) in [list, tuple]: + list_b.extend(l2) + else: + list_b.append(l2) + if level > 1: + list_b = subDown(list_b, level-1) + return list_b + + list_tmp = [] + if type(list_all) in [list, tuple]: + for l in list_all: + list_b = subDown(l, level-1) + list_tmp.append(list_b) + else: + list_tmp = list_all + return list_tmp + + list_tmp = list_all.copy() + for x in range(level-1): + list_tmp = create_listDown(list_tmp, level) + + list_r = [] + l_min = [] + + for el in list_tmp: + if type(el) not in [list, tuple]: + break + + l_min.append(len(el)) + + if l_min == []: + l_min = [0] + lm = min(l_min) + for elm in range(lm): + for el in list_tmp: + list_r.append(el[elm]) + + list_tmp = list_r + + for lev in range(level-1): + list_tmp = [list_tmp] + + return list_tmp + + +def joiner(list_all, level, level2=1): + list_tmp = [] + + if level > level2: + if type(list_all) in [list, tuple]: + for list_a in list_all: + if type(list_a) in [list, tuple]: + list_tmp.extend(list_a) + else: + list_tmp.append(list_a) + else: + list_tmp = list_all + + list_res = joiner(list_tmp, level, level2=level2+1) + list_tmp = [list_res] + + if level == level2: + if type(list_all) in [list, tuple]: + for list_a in list_all: + if type(list_a) in [list, tuple]: + list_tmp.extend(list_a) + else: + list_tmp.append(list_a) + else: + list_tmp.append(list_all) + + if level < level2: + if type(list_all) in [list, tuple]: + for l in list_all: + list_tmp.append(l) + else: + list_tmp.append(l) + + return list_tmp + + +def wrapper_2(l_etalon, list_a, level): + def subWrap(list_a, level, count): + list_b = [] + if level == 1: + if len(list_a) == count: + for l in list_a: + list_b.append([l]) + 
else: + dc = len(list_a)//count + for l in range(count): + list_c = [] + for j in range(dc): + list_c.append(list_a[l*dc+j]) + list_b.append(list_c) + else: + for l in list_a: + list_b = subWrap(l, level-1, count) + return list_b + + def subWrap_2(l_etalon, len_l, level): + len_r = len_l + if type(l_etalon) in [list, tuple]: + len_r = len(l_etalon) * len_l + if level > 1: + len_r = subWrap_2(l_etalon[0], len_r, level-1) + + return len_r + + len_l = len(l_etalon) + lens_l = subWrap_2(l_etalon, 1, level) + list_tmp = subWrap(list_a, level, lens_l) + + for l in range(level-1): + list_tmp = [list_tmp] + return list_tmp