comparison: keras_deep_learning.py @ 34:4efd73be98bb (draft)

"planemo upload for repository https://github.com/bgruening/galaxytools/tree/master/tools/sklearn commit e2a5eade6d0e5ddf3a47630381a0ad90d80e8a04"

| author | bgruening |
|---|---|
| date | Tue, 13 Apr 2021 18:13:32 +0000 |
| parents | a2da4cebc584 |
| children | 318484f56b6a |
comparing 33:ecd247e1ea9c to 34:4efd73be98bb:
@@ -175,26 +175,26 @@
             options.update(kwargs)
 
         # merge layers
         if 'merging_layers' in options:
             idxs = literal_eval(options.pop('merging_layers'))
-            merging_layers = [all_layers[i-1] for i in idxs]
+            merging_layers = [all_layers[i - 1] for i in idxs]
             new_layer = klass(**options)(merging_layers)
         # non-input layers
         elif inbound_nodes is not None:
-            new_layer = klass(**options)(all_layers[inbound_nodes-1])
+            new_layer = klass(**options)(all_layers[inbound_nodes - 1])
         # input layers
         else:
             new_layer = klass(**options)
 
         all_layers.append(new_layer)
 
     input_indexes = _handle_shape(config['input_layers'])
-    input_layers = [all_layers[i-1] for i in input_indexes]
+    input_layers = [all_layers[i - 1] for i in input_indexes]
 
     output_indexes = _handle_shape(config['output_layers'])
-    output_layers = [all_layers[i-1] for i in output_indexes]
+    output_layers = [all_layers[i - 1] for i in output_indexes]
 
     return Model(inputs=input_layers, outputs=output_layers)
 
 
 def get_batch_generator(config):
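For context, this hunk only reformats the 1-based layer indexing used to wire the functional model: each configured layer is appended to `all_layers`, non-input layers are connected to the tensor at `inbound_nodes - 1`, merging layers consume a list of earlier indices, and the final `Model` is built from index-selected inputs and outputs. The sketch below is a hypothetical, self-contained illustration of that wiring; the layer choices and config indices are made up, not taken from the tool.

```python
# Minimal sketch of the 1-based layer-index wiring (illustrative, not the tool's config).
from tensorflow.keras.layers import Concatenate, Dense, Input
from tensorflow.keras.models import Model

all_layers = []

# Index 1: an input layer -- instantiated on its own, nothing to connect to yet.
all_layers.append(Input(shape=(8,)))

# Index 2: a non-input layer wired to inbound node 1 (1-based, hence the "- 1").
all_layers.append(Dense(16, activation='relu')(all_layers[1 - 1]))

# Index 3: a second input branch.
all_layers.append(Input(shape=(4,)))

# Index 4: a merging layer fed by indices [2, 3], mirroring 'merging_layers'.
merging_layers = [all_layers[i - 1] for i in (2, 3)]
all_layers.append(Concatenate()(merging_layers))

# Inputs and outputs are likewise picked out by 1-based index before building the Model.
input_layers = [all_layers[i - 1] for i in (1, 3)]
output_layers = [all_layers[i - 1] for i in (4,)]

model = Model(inputs=input_layers, outputs=output_layers)
model.summary()
```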
@@ -298,12 +298,11 @@
                             ['optimizer_selection']['optimizer_type']).lower()
 
     options.update((inputs['mode_selection']['compile_params']
                     ['optimizer_selection']['optimizer_options']))
 
-    train_metrics = (inputs['mode_selection']['compile_params']
-                     ['metrics']).split(',')
+    train_metrics = inputs['mode_selection']['compile_params']['metrics']
     if train_metrics[-1] == 'none':
         train_metrics = train_metrics[:-1]
     options['metrics'] = train_metrics
 
     options.update(inputs['mode_selection']['fit_params'])
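The change here drops the `.split(',')`: the `metrics` value now arrives as a list rather than a comma-separated string, while the trailing `'none'` entry is still trimmed before the metrics reach Keras. Below is a hedged sketch of that compile-option handling, assuming a toy `inputs` dict shaped like the Galaxy tool JSON; the concrete values, the tiny model, and the direct `model.compile(**options)` call are illustrative assumptions, not the tool's actual flow.

```python
# Hedged sketch of the compile-parameter handling; the dict contents are illustrative.
from tensorflow.keras.layers import Dense, Input
from tensorflow.keras.models import Sequential

inputs = {
    'mode_selection': {
        'compile_params': {
            'loss': 'binary_crossentropy',
            'optimizer_selection': {
                'optimizer_type': 'Adam',
                'optimizer_options': {},
            },
            # Newer revisions receive a list; the old code stored a comma-separated
            # string such as 'acc,none' and split it, which this hunk removes.
            'metrics': ['acc', 'none'],
        },
    },
}

compile_params = inputs['mode_selection']['compile_params']
options = {'loss': compile_params['loss']}
options['optimizer'] = (compile_params['optimizer_selection']
                        ['optimizer_type']).lower()
options.update(compile_params['optimizer_selection']['optimizer_options'])

# Strip the trailing 'none' placeholder before handing metrics to Keras,
# exactly as the diff does.
train_metrics = compile_params['metrics']
if train_metrics[-1] == 'none':
    train_metrics = train_metrics[:-1]
options['metrics'] = train_metrics

model = Sequential([Input(shape=(8,)), Dense(1, activation='sigmoid')])
model.compile(**options)
print(options)
```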