JacobARose / pyleaves

Full project repo containing sub-packages for database, dataloaders, models, training, and analysis.

TypeError: <lambda>() takes 1 positional argument but 2 were given

piperod opened this issue

It seems that there is an error in the demo notebook. Running it produces the following traceback:


TypeError Traceback (most recent call last)
in <module>()
27 return data
28
---> 29 train_dataset = get_tf_dataset(filenames = train_data['path'].values, labels = train_data['label'].values)
30 val_dataset = get_tf_dataset(filenames = val_data['path'].values, labels = val_data['label'].values)
31

in get_tf_dataset(filenames, labels)
20 data = tf.data.Dataset.from_tensor_slices((filenames, labels))
21 data = data.shuffle(len(filenames))
---> 22 data = data.interleave(lambda x: tf.data.Dataset(x).map(parse_function, num_parallel_calls=tf.data.experimental.AUTOTUNE), cycle_length=4, block_length=16)
23 # data = data.map(train_preprocess, num_parallel_calls=4)
24 data = data.batch(batch_size)

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py in interleave(self, map_func, cycle_length, block_length, num_parallel_calls)
1824 num_parallel_calls=None):
1825 return DatasetV1Adapter(super(DatasetV1, self).interleave(
-> 1826 map_func, cycle_length, block_length, num_parallel_calls))
1827
1828 @functools.wraps(DatasetV2.filter)

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py in interleave(self, map_func, cycle_length, block_length, num_parallel_calls)
1234 """
1235 if num_parallel_calls is None:
-> 1236 return InterleaveDataset(self, map_func, cycle_length, block_length)
1237 else:
1238 return ParallelInterleaveDataset(self, map_func, cycle_length,

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py in __init__(self, input_dataset, map_func, cycle_length, block_length)
3290 self._input_dataset = input_dataset
3291 self._map_func = StructuredFunctionWrapper(
-> 3292 map_func, self._transformation_name(), dataset=input_dataset)
3293 if not isinstance(self._map_func.output_structure, DatasetStructure):
3294 raise TypeError(

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py in __init__(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)
2553 resource_tracker = tracking.ResourceTracker()
2554 with tracking.resource_tracker_scope(resource_tracker):
-> 2555 self._function = wrapper_fn._get_concrete_function_internal()
2556 if add_to_graph:
2557 self._function.add_to_graph(ops.get_default_graph())

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal(self, *args, **kwargs)
1353 """Bypasses error checking when getting a graph function."""
1354 graph_function = self._get_concrete_function_internal_garbage_collected(
-> 1355 *args, **kwargs)
1356 # We're returning this concrete function to someone, and they may keep a
1357 # reference to the FuncGraph without keeping a reference to the

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
1347 if self.input_signature:
1348 args, kwargs = None, None
-> 1349 graph_function, _, _ = self._maybe_define_function(args, kwargs)
1350 return graph_function
1351

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
1650 graph_function = self._function_cache.primary.get(cache_key, None)
1651 if graph_function is None:
-> 1652 graph_function = self._create_graph_function(args, kwargs)
1653 self._function_cache.primary[cache_key] = graph_function
1654 return graph_function, args, kwargs

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
1543 arg_names=arg_names,
1544 override_flat_arg_shapes=override_flat_arg_shapes,
-> 1545 capture_by_value=self._capture_by_value),
1546 self._function_attributes)
1547

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
713 converted_func)
714
--> 715 func_outputs = python_func(*func_args, **func_kwargs)
716
717 # invariant: func_outputs contains only Tensors, CompositeTensors,

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py in wrapper_fn(*args)
2547 attributes=defun_kwargs)
2548 def wrapper_fn(*args): # pylint: disable=missing-docstring
-> 2549 ret = _wrapper_helper(*args)
2550 ret = self._output_structure._to_tensor_list(ret)
2551 return [ops.convert_to_tensor(t) for t in ret]

/home/irodri15/miniconda3/envs/tf_gpu/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py in _wrapper_helper(*args)
2487 nested_args = (nested_args,)
2488
-> 2489 ret = func(*nested_args)
2490 # If func returns a list of tensors, nest.flatten() and
2491 # ops.convert_to_tensor() would conspire to attempt to stack

TypeError: <lambda>() takes 1 positional argument but 2 were given
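For reference, this kind of error typically arises because tf.data.Dataset.from_tensor_slices((filenames, labels)) yields (filename, label) pairs, so the single-argument lambda passed to interleave is called with two positional arguments. Below is a minimal sketch of a get_tf_dataset that sidesteps the problem by mapping a two-argument parse function directly over the pairs; the parse_function body and the 224x224 resize are illustrative assumptions, not the notebook's actual implementation or fix.

import tensorflow as tf

# Hypothetical parse_function: load, decode, and resize one image.
# The real notebook's version may differ; the 224x224 size is an assumption.
def parse_function(filename, label):
    image = tf.io.read_file(filename)
    image = tf.image.decode_jpeg(image, channels=3)
    image = tf.image.resize(image, [224, 224])
    return image, label

def get_tf_dataset(filenames, labels, batch_size=32):
    data = tf.data.Dataset.from_tensor_slices((filenames, labels))
    data = data.shuffle(len(filenames))
    # from_tensor_slices((filenames, labels)) yields (filename, label) pairs,
    # so the mapped function must accept two arguments, not one.
    data = data.map(parse_function, num_parallel_calls=tf.data.experimental.AUTOTUNE)
    data = data.batch(batch_size)
    return data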

Ah yes, sorry! I accidentally pushed a version that was meant for debugging. I just pushed another version after setting debug = False in code cell 12, and the error seems to go away.

Closing for now. If the error persists, open another issue (or we can re-open this one; I don't know if that's possible). I'll also add a card to the project board for some time after Thanksgiving to formalize a better Git flow procedure that uses branches, to avoid this in the future.