HoloViews/Panel - TypeError: unsupported operand type(s) for *: 'function' and 'Points' - overlay

I'm trying to create the parameter px on the holoviews.operation.datashader.spread operation interactively changeable together with an additional overlay.
The diagram with an IntSlider and the function returned by pn.bind(get_spreaded, px=px_slider) is working as expected when executing `pn.Column(px_slider, interactive)`.
But with an additional overlay, the line pn.Column(px_slider, interactive * other) reports TypeError: unsupported operand type(s) for *: 'function' and 'Points'.
How can I use the * operator with the function returned from pn.bind(...)?
Or is this the wrong way of doing this? Is there a better and easier solution?
I ran the following code in jupyter lab:
import holoviews as hv
import panel as pn
import numpy as np
from holoviews.operation.datashader import rasterize, spread
import colorcet
import pandas as pd
# Load the Bokeh plotting backend for HoloViews and initialise Panel's
# notebook extension so widgets render inline in Jupyter.
hv.extension('bokeh')
pn.extension()
# Default sizes for every Path and Image element created below.
hv.opts.defaults(
hv.opts.Path(width=800, height=400),
hv.opts.Image(width=800, height=400)
)
def random_walk(n, f=200):
    """Return an (n - 1 + f, 2) array tracing a smoothed 2D random walk.

    Gaussian steps are smoothed with a length-``f`` moving-average filter
    and cumulatively summed; the x axis additionally receives a slow
    sinusoidal wobble, and both axes get small measurement noise.
    """
    kernel = np.ones(f) / f
    walk_x = np.convolve(np.random.normal(0, 0.1, size=n), kernel).cumsum()
    walk_y = np.convolve(np.random.normal(0, 0.1, size=n), kernel).cumsum()
    m = n - 1 + f  # length of the full convolution output
    walk_x += 0.1 * np.sin(0.1 * np.arange(m))    # add wobble on x axis
    walk_x += np.random.normal(0, 0.005, size=m)  # add measurement noise
    walk_y += np.random.normal(0, 0.005, size=m)
    return np.column_stack([walk_x, walk_y])
# create a path and plot it
path = hv.Path([random_walk(10000, 30)])
path

# rasterize and show the plot
rasterized = rasterize(path).opts(colorbar=True, cmap=colorcet.fire, cnorm='log')
rasterized


# the callback for getting the spreaded plot
def get_spreaded(px=3, shape='circle'):
    """Apply the datashader spread operation to the rasterized path."""
    return spread(rasterized, px=px, shape=shape)


# show the plot returned from the callback
get_spreaded()

# create the slider for interactively changing the px value
px_slider = pn.widgets.IntSlider(name='Number of pixels to spread on all sides', start=0, end=10, value=3, step=1)

# bind the slider to the callback method
interactive = pn.bind(get_spreaded, px=px_slider)

# show only one plot without any overlay
pn.Column(px_slider, interactive)

# create data for an overlay
df = pd.DataFrame(data={'c1': [1, 2, 3, 4, 5], 'c2': [3, 4, 5, 6, 7]})
other = hv.Points(data=df)
other

# show both plots -- this is the line that raises the TypeError quoted
# below: pn.bind returns a plain function, which HoloViews' * operator
# does not know how to overlay
pn.Column(px_slider, interactive * other)
The last line results in the following Error message:
#
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[159], line 1
----> 1 pn.Column(px_slider, interactive * other)
TypeError: unsupported operand type(s) for *: 'function' and 'Points'
I would expect that there is some way to wrap the function and make it possible to use the * operator, but I couldn't find a way yet.

Although in this particular case the return value of the function is something that HoloViews could (in principle) overlay with the other plot, HoloViews doesn't know that; the HoloViews * operator only knows how to handle HoloViews objects (Elements, HoloMaps, Layouts, and DynamicMaps), not bound Panel functions.
You could use a DynamicMap much like you used pn.bind, but here HoloViews operations already understand what to do with Panel widgets, so you can simply supply the widget to the spread operation (or any other operation's parameters):
import panel as pn, numpy as np, holoviews as hv, colorcet, pandas as pd
from holoviews.operation.datashader import rasterize, spread
# Bokeh backend for HoloViews, Panel extension for inline widgets.
hv.extension('bokeh')
pn.extension()
# Default sizes for all Path and Image elements.
hv.opts.defaults(
hv.opts.Path(width=800, height=400),
hv.opts.Image(width=800, height=400)
)
def random_walk(n, f=200):
    """Return an (n - 1 + f, 2) array of a smoothed 2D random walk.

    Normal increments are filtered with a length-``f`` box kernel and
    cumulatively summed; x gets a slow sine wobble and both coordinates
    receive small Gaussian measurement noise.
    """
    smooth = np.ones(f) / f
    px = np.convolve(np.random.normal(0, 0.1, size=n), smooth).cumsum()
    py = np.convolve(np.random.normal(0, 0.1, size=n), smooth).cumsum()
    length = n - 1 + f  # size after 'full' convolution
    px += 0.1 * np.sin(0.1 * np.arange(length))     # add wobble on x axis
    px += np.random.normal(0, 0.005, size=length)   # add measurement noise
    py += np.random.normal(0, 0.005, size=length)
    return np.column_stack([px, py])
# create plot with interactively controlled spreading
px_slider = pn.widgets.IntSlider(name='Number of pixels to spread on all sides',
                                 start=0, end=10, value=3, step=1)
path = hv.Path([random_walk(10000, 30)])
rasterized = rasterize(path).opts(colorbar=True, cmap=colorcet.fire, cnorm='log')
# HoloViews operations accept Panel widgets directly as parameter values,
# so the slider can be handed straight to spread -- no pn.bind needed
spreaded = spread(rasterized, px=px_slider, shape='circle')

# create data for an overlay
df = pd.DataFrame(data={'c1': [1, 2, 3, 4, 5], 'c2': [3, 4, 5, 6, 7]})
other = hv.Points(data=df)

# show both plots
pn.Column(px_slider, spreaded * other)

Related

networkx: remove edge with specific attribute from multigraph

I'd like to remove a specific edge (specific color) from a MultiGraph.
How can I do that?
Following code does not work.
#!/usr/bin/env python
# Build a MultiGraph with two parallel edges between nodes 2 and 4
# (one green, one blue), then try to delete only the green one.
import matplotlib.pyplot as plt
import networkx as nx
G = nx.MultiGraph()
# the_colored_graph.add_edge(v1, v2, "red")
G.add_edge(1, 2, color="red")
G.add_edge(2, 3, color="red")
G.add_edge(4, 2, color="green")
G.add_edge(2, 4, color="blue")
print (G.edges(data=True))
# G.remove_edge(2, 4, color="green")
#
# Collect the endpoint pairs of every edge whose 'color' attribute is 'green'.
selected_edge = [(u,v) for u,v,e in G.edges(data=True) if e['color'] == 'green']
print (selected_edge)
# NOTE(review): remove_edge without a key removes an arbitrary edge between
# these endpoints, not necessarily the green one -- see the answer below.
G.remove_edge(selected_edge[0][0], selected_edge[0][1])
print (G.edges(data=True))
nx.draw(G)
plt.show()
When constructing the multigraph, assign a "key" attribute to each edge (the key could be anything that disambiguates the parallel edges - say, the color):
G.add_edge(1, 2, color="red", key='red')
Remove an edge by specifying the end nodes and the key:
G.remove_edge(1, 2, key='red')

ValueError: Cannot feed value of shape (1, 2048, 2048, 1) for Tensor 'image_tensor:0', which has shape '(?, ?, ?, 3)'

Using TensorFlow I am trying to detect one object(png and grayscale image). I have trained and exported a model.ckpt successfully. Now I am trying to restore the saved model.ckpt for prediction. Here is the script:
import numpy as np
import os
import six.moves.urllib as urllib
import sys
import tarfile
import tensorflow as tf
import zipfile
from collections import defaultdict
from io import StringIO
from matplotlib import pyplot as plt
from PIL import Image

# This notebook-exported script targets the TF 1.4 object-detection API.
if tf.__version__ != '1.4.0':
    raise ImportError('Please upgrade your tensorflow installation to v1.4.0!')

# This is needed to display the images.
#matplotlib inline

# This is needed since the notebook is stored in the object_detection folder.
sys.path.append("..")
from utils import label_map_util
from utils import visualization_utils as vis_util

MODEL_NAME = 'melon_graph'
# Path to frozen detection graph. This is the actual model that is used for the object detection.
PATH_TO_CKPT = MODEL_NAME + '/frozen_inference_graph.pb'
# List of the strings that is used to add correct label for each box.
PATH_TO_LABELS = os.path.join('training', 'object_detection.pbtxt')
NUM_CLASSES = 1

# Deserialize the frozen GraphDef into a fresh Graph.
detection_graph = tf.Graph()
with detection_graph.as_default():
    od_graph_def = tf.GraphDef()
    with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
        serialized_graph = fid.read()
        od_graph_def.ParseFromString(serialized_graph)
        tf.import_graph_def(od_graph_def, name='')

# Map numeric class ids to human-readable category names.
label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
category_index = label_map_util.create_category_index(categories)
def load_image_into_numpy_array(image):
    """Convert a PIL image to a float64 array of shape (height, width, 1).

    Note the trailing dimension is 1 (single channel); the detection model
    in this script expects 3 channels, which is the source of the question's
    ValueError.
    """
    width, height = image.size
    pixels = np.array(image.getdata())
    return pixels.reshape((height, width, 1)).astype(np.float64)
# For the sake of simplicity we will use only 2 images:
# If you want to test the code with your images, just add path to the images to the TEST_IMAGE_PATHS.
PATH_TO_TEST_IMAGES_DIR = 'test_images'
TEST_IMAGE_PATHS = [os.path.join(PATH_TO_TEST_IMAGES_DIR, 'te_data{}.png'.format(i)) for i in range(1, 336)]
# Size, in inches, of the output images.
IMAGE_SIZE = (12, 8)

with detection_graph.as_default():
    with tf.Session(graph=detection_graph) as sess:
        # Definite input and output Tensors for detection_graph
        image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
        # Each box represents a part of the image where a particular object was detected.
        detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
        # Each score represent how level of confidence for each of the objects.
        # Score is shown on the result image, together with the class label.
        detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
        detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
        num_detections = detection_graph.get_tensor_by_name('num_detections:0')
        for image_path in TEST_IMAGE_PATHS:
            image = Image.open(image_path)
            # the array based representation of the image will be used later in order to prepare the
            # result image with boxes and labels on it.
            image_np = load_image_into_numpy_array(image)
            # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
            image_np_expanded = np.expand_dims(image_np, axis=0)
            # Actual detection.
            (boxes, scores, classes, num) = sess.run(
                [detection_boxes, detection_scores, detection_classes, num_detections],
                feed_dict={image_tensor: image_np_expanded})
            # Visualization of the results of a detection.
            vis_util.visualize_boxes_and_labels_on_image_array(
                image_np, np.squeeze(boxes), np.squeeze(classes).astype(np.float64),
                np.squeeze(scores), category_index,
                use_normalized_coordinates=True, line_thickness=5)
            plt.figure(figsize=IMAGE_SIZE)
            plt.imshow(image_np)
and this is the error
Traceback (most recent call last): File "cochlear_detection.py",
line 81, in
(boxes, scores, classes, num) = sess.run([detection_boxes, detection_scores, detection_classes, num_detections],
feed_dict={image_tensor: image_np_expanded}) File
"/anaconda/lib/python3.6/site-packages/tensorflow/python/client/session.py",
line 889, in run
run_metadata_ptr) File "/anaconda/lib/python3.6/site-packages/tensorflow/python/client/session.py",
line 1096, in _run
% (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape()))) ValueError: Cannot feed value of shape (1, 2048, 2048, 1) for Tensor
'image_tensor:0', which has shape '(?, ?, ?, 3)'

Fitting Integral to data

I am trying to fit data to a function f(x) that is an integral over T, where x is the upper border of the integral. I am trying to do it with scipy.curve_fit() but I don't know how to write my integral as a function that can be passed to curve_fit.
I had a look at similar problems but I didn't see anything that fits to my problem.
I cannot provide any guess values for A and Ea since I have no clue at all what they could be as of now.
from scipy import optimize
import matplotlib.pyplot as plt
import numpy as np
from scipy import interpolate
from scipy import integrate
class get_Ton:
    """Load scattering-vs-temperature traces and fit an integral model."""

    def __init__(self):
        # CSV layout: alternating (temperature, scattering) column pairs.
        self.data = np.genfromtxt('test3.csv', delimiter=',', skip_header=8)

    def loop(self):
        def fit_tangent():
            # Spline through the current trace (first derivative available
            # via splev if needed later).
            tck = interpolate.splrep(self.x, self.y, k=2, s=0)
            dev_1 = interpolate.splev(self.x, tck, der=1)

            def integrand(T, A, Ea):
                # Arrhenius-type kernel; 8.314 is the gas constant R.
                return A*np.exp(-Ea/(8.314*T))

            def polyn(x, A, Ea):
                # quad returns (value, abserr); keep only the value.
                return integrate.quad(integrand, 25, x, args=(A, Ea))[0]

            vcurve = np.vectorize(polyn)
            # [2000, 150] is the initial guess for (A, Ea).
            p, e = optimize.curve_fit(vcurve, self.x, self.y, [2000, 150])
            xd = np.linspace(50, 70, 100)
            plt.plot(self.x, self.y, "o")
            plt.plot(xd, vcurve(xd, *p))

        # Walk the column pairs; even index = temperature, odd = scattering.
        for i in range(self.data.shape[1]):
            if i % 2 == 0:
                self.temp = self.data[:, i]
                self.scat = self.data[:, i+1]
                self.x = [26.192, 26.861000000000001, 27.510000000000002, 28.178000000000001, 28.856000000000002, 29.515000000000001, 30.183, 30.823, 31.5, 32.158999999999999, 32.856000000000002, 33.515000000000001, 34.145000000000003, 34.823, 35.491, 36.168999999999997, 36.837000000000003, 37.533999999999999, 38.164000000000001, 38.832000000000001, 39.481000000000002, 40.158999999999999, 40.826999999999998, 41.496000000000002, 42.164000000000001, 42.832000000000001, 43.500999999999998, 44.188000000000002, 44.837000000000003, 45.505000000000003, 46.173000000000002, 46.832000000000001, 47.500999999999998, 48.188000000000002, 48.828000000000003, 49.496000000000002, 50.173999999999999, 50.813000000000002, 51.481000000000002, 52.112000000000002, 52.808999999999997, 53.439, 54.116, 54.765999999999998, 55.453000000000003, 56.101999999999997, 56.761000000000003, 57.429000000000002, 58.078000000000003, 58.737000000000002, 59.442999999999998, 60.082999999999998, 60.770000000000003, 61.448, 62.125999999999998, 62.756, 63.414999999999999, 64.082999999999998, 64.742000000000004, 65.420000000000002, 66.087999999999994, 66.747, 67.415000000000006]
                self.y = [1553.5, 1595.0, 1497.8, 1695.5999999999999, 1328.7, 1279.0, 1547.8, 1412.8, 1037.0, 1473.5, 1447.4000000000001, 1532.5999999999999, 1484.2, 1169.5, 1395.2, 1183.5999999999999, 949.01999999999998, 1238.0999999999999, 1225.5999999999999, 924.80999999999995, 1650.5999999999999, 803.96000000000004, 1245.7, 1190.0, 1207.0, 1294.0, 1174.9000000000001, 1229.8, 1260.0, 1129.2, 1142.9000000000001, 987.63999999999999, 1389.5999999999999, 1366.0, 1102.0999999999999, 1325.5, 1258.9000000000001, 1285.7, 1217.5, 871.47000000000003, 820.24000000000001, 1388.7, 1391.0, 1400.3, 2482.5999999999999, 3360.5999999999999, 7013.5, 11560.0, 16525.0, 22538.0, 32556.0, 43878.0, 59093.0, 67977.0, 75949.0, 82316.0, 90213.0, 90294.0, 99928.0, 128240.0, 181280.0, 226380.0, 223260.0]
                fit_tangent()
        plt.ylim((-100, 1000000))
        plt.show()


def main():
    this = get_Ton()
    this.loop()


if __name__ == "__main__":
    main()
Three issues here. First, the function polyn does not depend on the variable of integration T, since this variable is integrated out. Remove T from its list of parameters. Accordingly, drop one of the numerical values in the trueydata computation:
trueydata = vcurve(truexdata, 3, 4)
Second, quad returns a tuple (integral_value, integral_error). Use [0] to return only the integral value.
def polyn(x, A, Ea):
    """Integrate ``integrand`` from 25 to ``x``; return only the value.

    ``quad`` returns a (value, abserr) tuple, so the error estimate is
    discarded here.
    """
    value, _abserr = integrate.quad(integrand, 25, x, args=(A, Ea))
    return value
Third, provide an initial guess for parameter values to curve_fit. If you don't, it will likely report unable to determine the number of parameters to fit. Even if successful, it will blindly use all-ones for the initial guess. An initial guess supplied by a human with an understanding of the optimization problem is often the difference between success and failure of multivariable optimization.
popt, pcov = optimize.curve_fit(vcurve, xdata, ydata, [2, 3])

How to use the mlab iso_surface module in a Mayavi app

I'm trying to build a simple Mayavi script application which utilises the mlab iso_surface module.
However, when I run my app it throws up two windows, one showing my mayavi iso_surface plot and the other showing a blank "Edit properties" window. It seems that the mayavi scene is not being displayed in the specified view layout for the "Edit properties" window.
So my question is: Why is the mayavi iso_surface scene not appearing in the view layout, and how do I get it in there?
A simple test script which displays this behaviour is pasted below. I am using Canopy version: 2.1.1.3504 (64 bit), python 3.5.2 on a Windows 10 system.
[Note: I have modified my original question to include another question. How do I update the 's' data with the input from a Range object (mult_s)? I have had a go at doing this below, but with no success. It throws up: TraitError: Cannot set the undefined 's' attribute of a 'ArraySource' object.]
class Isoplot1(HasTraits):
    """Mayavi iso-surface app: a scene plus a Range slider scaling s.

    The extraction of this post turned the ``@on_trait_change`` decorators
    into ``#``-comments; they are restored here, since without them neither
    method would ever be invoked by Traits.
    """

    scene = Instance(MlabSceneModel, ())
    mult_s = Range(1, 5, 1)

    @on_trait_change('scene.activated')
    def _setup(self):
        # Create x, y, z, s data
        L = 10.
        x, y, z = np.mgrid[-L:L:101j, -L:L:101j, -L:L:101j]
        self.s0 = np.sqrt(4 * x ** 2 + 2 * y ** 2 + z ** 2)
        # create the data pipeline
        # NOTE(review): using raw mlab.pipeline here (instead of
        # self.scene.mlab.pipeline) is what opens the separate window,
        # as explained in the answer below.
        self.src1 = mlab.pipeline.scalar_field(x, y, z, self.s0)
        # Create the plot
        self.plot1 = self.scene.mlab.pipeline.iso_surface(
            self.src1, contours=[5, ], opacity=0.5, color=(1, 1, 0)
        )

    @on_trait_change('mult_s')
    def change_s(self):
        # NOTE(review): ArraySource has no 's' trait -- this is the line
        # that raises TraitError, as described in the question.
        self.src1.set(s=self.s0 * self.mult_s)

    # Set the layout
    view = View(Item('scene',
                     editor=SceneEditor(scene_class=MayaviScene),
                     height=400, width=600, show_label=False),
                HGroup('mult_s',),
                resizable=True
                )


isoplot1 = Isoplot1()
isoplot1.configure_traits()
If you use self.scene.mlab.pipeline.scalar_field instead of mlab.pipeline.scalar_field this should not happen.
In general, you should avoid creating any visualization in the initializer. Instead you should always setup the scene when the scene.activated event is fired. To be safe for uses with raw mlab you should rewrite your code as follows.
from mayavi import mlab
from traits.api import HasTraits, Instance, on_trait_change
from traitsui.api import View, Item
from mayavi.core.ui.api import MayaviScene, SceneEditor, MlabSceneModel
import numpy as np


class Isoplot1(HasTraits):
    """Iso-surface viewer whose pipeline is built once the scene is live.

    The ``@on_trait_change`` decorator was mangled to a ``#`` comment by
    extraction and is restored here; it is what defers _setup until the
    scene.activated event fires.
    """

    scene = Instance(MlabSceneModel, ())

    @on_trait_change('scene.activated')
    def _setup(self):
        # Create x, y, z, s data
        L = 10.
        x, y, z = np.mgrid[-L:L:101j, -L:L:101j, -L:L:101j]
        s = np.sqrt(4 * x ** 2 + 2 * y ** 2 + z ** 2)
        # create the data pipeline
        self.src1 = mlab.pipeline.scalar_field(x, y, z, s)
        # Create the plot
        self.plot1 = self.scene.mlab.pipeline.iso_surface(
            self.src1, contours=[5, ], opacity=0.5, color=(1, 1, 0)
        )

    # Set the layout
    view = View(Item('scene',
                     editor=SceneEditor(scene_class=MayaviScene),
                     height=400, width=600, show_label=False),
                resizable=True
                )


isoplot1 = Isoplot1()
isoplot1.configure_traits()
You probably already know this but just in case you can also take a look at some of the other mayavi interactive examples in the documentation.

How to use ScatterInspector and ScatterInspectorOverlay?

I would like to use the chaco tools ScatterInspector and/or ScatterInspectorOverlay with enaml. I've set up a very simple controller and view (source below) but cannot determine how to proceed. I have tried unsuccessfully to follow the minimal and old examples I've found.
If I uncomment the overlay part for ScatterInspectorOverlay, the code fails to run with
File ".../chaco/scatter_inspector_overlay.py", line 51, in overlay if not plot or not plot.index or not getattr(plot, "value", True):
If I comment out the overlay part, I of course don't get the overlay behavior I want and also, on moving the mouse, get
File ".../chaco/tools/scatter_inspector.py", line 48, in normal_mouse_move index = plot.map_index((event.x, event.y), threshold=self.threshold)
view.enaml source:
from enaml.widgets.api import (
    Window, Container, EnableCanvas,
)


enamldef ScatterView(Window):
    # Window hosting the chaco plot owned by the controller.
    attr controller
    title = "Scatter Inspector Test"
    initial_size = (640,480)
    Container:
        EnableCanvas:
            component = controller.scatter_plot
controller.py source:
import enaml
from enaml.stdlib.sessions import show_simple_view
from traits.api import HasTraits, Instance
from chaco.api import Plot, ArrayPlotData, ScatterInspectorOverlay
from chaco.tools.api import ScatterInspector
from numpy import linspace, sin


class ScatterController(HasTraits):
    """Owns the chaco Plot displayed by the enaml ScatterView window."""

    scatter_plot = Instance(Plot)

    def _scatter_plot_default(self):
        # data
        x = linspace(-14, 14, 100)
        y = sin(x) * x**3
        plotdata = ArrayPlotData(x=x, y=y)
        # plot
        scatter_plot = Plot(plotdata)
        renderer = scatter_plot.plot(("x", "y"), type="scatter", color="red")
        # inspector
        # NOTE(review): the tool is attached to the Plot container here, not
        # to the scatter renderer -- the self-answer below corrects this.
        scatter_plot.tools.append(ScatterInspector(scatter_plot))
        # overlay
        # scatter_plot.overlays.append( ScatterInspectorOverlay(
        #     scatter_plot,
        #     hover_color = 'red',
        #     hover_marker_size = 6,
        #     selection_marker_size = 6,
        #     selection_color = 'yellow',
        #     selection_outline_color='purple',
        #     selection_line_width = 3
        # ))
        #return
        return scatter_plot


if __name__ == "__main__":
    with enaml.imports():
        from view import ScatterView
    main_controller = ScatterController()
    window = ScatterView(controller=ScatterController())
    show_simple_view(window)
The problem with my above code was that I was adding ScatterInspector to scatter_plot rather than to renderer and that I was missing the [0] index to get renderer.
The key thing I was really wanting to do, though, was to be notified when the mouse was hovering over a data point and/or a data point was selected. I added when_hover_or_selection_changes which shows how to do that.
Working controller.py:
...
# plot
scatter_plot = Plot(plotdata)
# plot() returns a list of renderers; [0] grabs the scatter renderer itself
renderer = scatter_plot.plot(("x", "y"), type="scatter", color="lightblue")[0]
# inspector -- attach to the renderer, not to the Plot container
renderer.tools.append(ScatterInspector(renderer))
# overlay
renderer.overlays.append(ScatterInspectorOverlay(renderer,
                                                 hover_color="red",
                                                 hover_marker_size=6,
                                                 selection_marker_size=6,
                                                 selection_color="yellow",
                                                 selection_outline_color="purple",
                                                 selection_line_width=3))
...

# get notified when hover or selection changes
# (the decorator below was mangled to a '#' comment by extraction and is
# restored; the print is updated to the Python 3 function form)
@on_trait_change('renderer.index.metadata')
def when_hover_or_selection_changes(self):
    print('renderer.index.metadata = ', self.renderer.index.metadata)