I have a color photo of an apple. How can I show only its outline (inside white, background black) with Python/PIL?
Something like this should work.
from PIL import Image, ImageFilter
image = Image.open('your_image.png')
image = image.filter(ImageFilter.FIND_EDGES)
image.save('new_name.png')
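If you want the detected edges specifically as a white outline on a black background, one option is to run FIND_EDGES on a grayscale copy and then threshold the result. This is only a rough sketch; the file names and the threshold value of 30 are placeholders to adjust for your photo.
from PIL import Image, ImageFilter
edges = Image.open('your_image.png').convert('L').filter(ImageFilter.FIND_EDGES)
outline = edges.point(lambda p: 255 if p > 30 else 0) # white edges, black everywhere else
outline.save('outline.png')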
If that doesn't give you the result you are looking for, you can try implementing Prewitt, Sobel, or Canny edge detection using PIL, Python, and other libraries; see the related question and the following example.
If you are trying to do particle detection/analysis rather than just edge detection, you can try using py4ij to call the ImageJ method you link to, which should give the result you expect, or try another particle-analysis Python library such as EMAN; alternatively, you can write a particle-detection algorithm yourself using PIL, SciPy, and NumPy.
If your object and background have fairly good contrast:
from PIL import Image
image = Image.open(your_image_file)
mask = image.convert("L")
th = 150 # the threshold has to be adjusted for the image of interest
mask = mask.point(lambda i: i < th and 255) # pixels below the threshold become 255 (white), the rest 0 (black)
mask.save(file_where_to_save_result)
If the contrast is higher in one of the three color channels, you may split the image into bands instead of converting it to grayscale.
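For example, a rough sketch (it assumes an RGB image, and the choice of band and the threshold are assumptions you would tune):
from PIL import Image
image = Image.open('your_image.png')
r, g, b = image.split() # work on individual bands instead of a grayscale conversion
th = 150 # threshold to tune for your image
mask = r.point(lambda i: 255 if i < th else 0) # pick whichever band shows the most contrast
mask.save('mask_from_red_band.png')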
If the object or background is fairly complicated, more sophisticated processing will be required.
Apple vs Lines
You can do it using just PIL and Python in fewer than 200 lines of code, though it would be easier to use Canny edge detection from a library.
Here are the steps: convert to grayscale for luminance, detect edges with a Sobel kernel convolution, then thin the edges using the magnitude and slope obtained from Sobel.
from PIL import Image
import math
def one_to_two_dimension_array(list_,columns):
#use list slice
return [ list_[i:i+columns] for i in range(0, len(list_),columns) ]
def flatten_matrix(matrix):
return [val for sublist in matrix for val in sublist]
def matrix_convole(matrix, kernel_matrix, multiplier):
return_list=[]
return_matrix=[]
border=(len(kernel_matrix) - 1) / 2;border=int(border)
center_kernel_pos=border
for matrix_row in range( len( matrix )):
for matrix_col in range(len( matrix[matrix_row] ) ):
accumulator = 0
if (matrix_row - border)<0 or \
(matrix_col-border)< 0 or \
(matrix_row+border) > (len( matrix )-border) or \
(matrix_col+border) > (len( matrix[matrix_row] )-border):
return_list.append(matrix[matrix_row][matrix_col])
continue
for kernel_row in range(len (kernel_matrix) ):
for kernel_col in range(len (kernel_matrix[kernel_row]) ):
relative_row= kernel_row - center_kernel_pos
relative_col= kernel_col - center_kernel_pos
kernel = kernel_matrix[kernel_row][kernel_col]
pixel = matrix [matrix_row + relative_row] [matrix_col + relative_col]
accumulator += pixel * kernel
return_list.append(accumulator* multiplier )
return_matrix = one_to_two_dimension_array( return_list, len( matrix[0] ) )
return return_matrix
def canny_round_degree(deg):
#0, 22.5, 45, 67.5, 90, 112.5, 135, 157.5, 180
if deg >= 0 and deg <= 22.5:
return 0
elif deg >= 22.5 and deg <= 67.5:
return 45
elif deg > 67.5 and deg <=112.5:
return 90
elif deg > 112.5 and deg <=157.5:
return 135
elif deg >= 157.5 and deg <= 180:
return 0
if deg <= 0 and deg >= -22.5:
return 0
elif deg <= -22.5 and deg >= -67.5:
return 135
elif deg < -67.5 and deg >= -112.5:
return 90
elif deg < -112.5 and deg >= -157.5:
return 45
elif deg <= -157.5 and deg >= -180:
return 0
image_path='apple.jpg'
gaussian_5x5_kernel=[[2,4,5,4,2],[4,9,12,9,4],[5,12,15,12,5],[4,9,12,9,4],[2,4,5,4,2]] #multiplier 1/159
sobel_kernel_gx=[[-1,0,1],[-2,0,2],[-1,0,1]]
sobel_kernel_gy=[[-1,-2,-1],[0,0,0],[1,2,1]]
im_list=list(Image.open(image_path).convert('L').getdata(0)) #grayscale, get first channel
im_width=Image.open(image_path).width
im_height=Image.open(image_path).height
im_matrix = one_to_two_dimension_array(im_list, im_width)
im_matrix_blur=matrix_convole(im_matrix,gaussian_5x5_kernel, 1/159)
sobel_gx_matrix=matrix_convole(im_matrix_blur,sobel_kernel_gx, 1)
sobel_gy_matrix=matrix_convole(im_matrix_blur,sobel_kernel_gy, 1)
sobel_gy_list=flatten_matrix(sobel_gy_matrix)
sobel_gx_list=flatten_matrix(sobel_gx_matrix)
sobel_g_magnitude_list = [math.hypot(gy,gx) for gx,gy in zip(sobel_gx_list,sobel_gy_list)]
sobel_g_angle_list = [ canny_round_degree(math.degrees(math.atan2(gy,gx))) for gx,gy in zip(sobel_gx_list,sobel_gy_list)]
sobel_g_angle_matrix = one_to_two_dimension_array(sobel_g_angle_list, im_width)
sobel_g_magnitude_matrix = one_to_two_dimension_array(sobel_g_magnitude_list, im_width)
suppression_list = []
for s_row in range( len( sobel_g_angle_matrix)):
for s_col in range(len( sobel_g_angle_matrix[s_row] ) ):
if (s_row - 1)<0 or \
(s_col-1)< 0 or \
(s_row+1) > (len( sobel_g_angle_matrix )-1) or \
(s_col+1) > (len( sobel_g_angle_matrix[s_row] )-1):
suppression_list.append(0)
continue
magnitude_in_question = sobel_g_magnitude_matrix[s_row][s_col]
#threshold on the gradient magnitude (arbitrary value; tune it for your image)
if magnitude_in_question < 36:
suppression_list.append(0)
continue
angle_in_question = sobel_g_angle_matrix[s_row][s_col]
east_magnitude = sobel_g_magnitude_matrix[s_row][s_col-1]
west_magnitude = sobel_g_magnitude_matrix[s_row][s_col+1]
north_magnitude = sobel_g_magnitude_matrix[s_row-1][s_col]
south_magnitude = sobel_g_magnitude_matrix[s_row+1][s_col]
north_east_magnitude = sobel_g_magnitude_matrix[s_row-1][s_col-1]
north_west_magnitude = sobel_g_magnitude_matrix[s_row-1][s_col+1]
south_east_magnitude = sobel_g_magnitude_matrix[s_row+1][s_col-1]
south_west_magnitude = sobel_g_magnitude_matrix[s_row+1][s_col+1]
if angle_in_question == 0 and magnitude_in_question > east_magnitude \
and magnitude_in_question > west_magnitude:
suppression_list.append(1)
elif angle_in_question == 90 and magnitude_in_question > north_magnitude \
and magnitude_in_question > south_magnitude:
suppression_list.append(1)
elif angle_in_question == 135 and magnitude_in_question > north_west_magnitude \
and magnitude_in_question > south_east_magnitude:
suppression_list.append(1)
elif angle_in_question == 45 and magnitude_in_question > north_east_magnitude \
and magnitude_in_question > south_west_magnitude:
suppression_list.append(1)
else:
suppression_list.append(0)
new_img = Image.new('1', (im_width,im_height)) #bw=1;grayscale =L
new_img.putdata( suppression_list )
new_img.save('apple-lines.png', 'PNG')
# Kepler's Laws.py
# plots the orbit of a planet in an eccentric orbit to illustrate
# the sweeping out of equal areas in equal times, with sun at focus
# The eccentricity of the orbit is random and determined by the initial velocity
# program uses normalised units (G =1)
# program by Peter Borcherds, University of Birmingham, England
from vpython import *
from random import random
from IPython import display
import pandas as pd
def MonthStep(time, offset=20, whole=1): # mark the end of each "month"
global ccolor # have to make it global, since label uses it before it is updated
if whole:
Ltext = str(int(time * 2 + dt)) # end of 'month', printing twice time gives about 12 'months' in 'year'
else:
Ltext = duration + str(time * 2) + ' "months"\n Initial speed: ' + str(round(speed, 3))
ccolor = color.white
label(pos=planet.pos, text=Ltext, color=ccolor,
xoffset=offset * planet.pos.x, yoffset=offset * planet.pos.y)
ccolor = (0.5 * (1 + random()), random(), random()) # randomise colour of radial vector
return ccolor
scene = display(title="Kepler's law of equal areas", width=1000, height=1000, range=3.2)
duration = 'Period: '
sun = sphere(color=color.yellow, radius=0.1) # motion of sun is ignored (or centre of mass coordinates)
scale = 1.0
poss = vector(0, scale, 0)
planet = sphere(pos=poss, color=color.cyan, radius=0.02)
while 1:
velocity = -vector(0.7 + 0.5 * random(), 0, 0) # gives a satisfactory range of eccentricities
##velocity = -vector(0.984,0,0) # gives period of 12.0 "months"
speed = mag(velocity)
steps = 20
dt = 0.5 / float(steps)
step = 0
time = 0
ccolor = color.white
oldpos = vector(planet.pos)
ccolor = MonthStep(time)
curve(pos=[sun.pos, planet.pos], color=ccolor)
while not (oldpos.x > 0 and planet.pos.x < 0):
rate(steps * 2) # keep rate down so that development of orbit can be followed
time += dt
oldpos = vector(planet.pos) # construction vector(planet.pos) makes oldpos a variable in its own right
# oldpos = planet.pos makes "oldposs" point to "planet.pos"
# oldposs = planet.pos[:] does not work, because vector does not permit slicing
denom = mag(planet.pos) ** 3
velocity -= planet.pos * dt / denom # inverse square law; force points toward sun
planet.pos += velocity * dt
# plot orbit
curve(pos=[oldpos, planet.pos], color=color.red)
step += 1
if step == steps:
step = 0
ccolor = MonthStep(time)
curve(pos=[sun.pos, planet.pos], color=color.white)
else:
# plot radius vector
curve(pos=[sun.pos, planet.pos], color=ccolor)
if scene.kb.keys:
print("key pressed")
duration = 'Duration: '
break
MonthStep(time, 50, 0)
label(pos=(2.5, -2.5, 0), text='Click for another orbit')
scene.mouse.getclick()
for obj in scene.objects:
if obj is sun or obj is planet: continue
obj.visible = 0 # clear the screen to do it again
I copied this Kepler's Laws code from Google and ran it in PyCharm, but I get the following error:
scene = display(title="Kepler's law of equal areas", width=1000, height=1000, range=3.2)
TypeError: 'module' object is not callable
I found some information on Google suggesting that the "pandas" library could fix this error, so I tried it, but the error is still there.
What should I do?
Replace "display" with "canvas", which is the correct name of this entity.
import numpy as np
from vispy import app, scene
from vispy.visuals import transforms
canvas = scene.SceneCanvas(keys='interactive', show=True)
vb = canvas.central_widget.add_view()
vb.camera = 'turntable'
vb.camera.rect = (-10, -10, 20, 20)
box = scene.visuals.Box(width=1, height=2, depth=3, color=(0, 0, 1, 0.3),
edge_color='green')
vb.add(box)
# Define a scale and translate transformation :
box.transform = transforms.STTransform(translate=(0., 0., 0.),
scale=(1., 1., 1.))
@canvas.events.key_press.connect
def on_key_press(ev):
tr = np.array(box.transform.translate)
sc = np.array(box.transform.scale)
if ev.text in '+':
tr[0] += .1
elif ev.text == '-':
tr[0] -= .1
elif ev.text == '(':
sc[0] += .1
elif ev.text == ')':
sc[0] -= .1
box.transform.translate = tr
box.transform.scale = sc
print('Translate (x, y, z): ', list(tr),
'\nScale (x, y, z): ', list(sc), '\n')
if __name__ == '__main__':
import sys
if sys.flags.interactive != 1:
app.run()
In the above code, if I add a MatrixTransform, rotate the cube, and then apply scaling, the cube becomes a rhombus.
What I would like to achieve is to rotate the cube in the canvas and scale it only in the X direction, without the other dimensions being affected.
I think we covered this in a vispy repository bug report. The solution was to swap the order of the matrix transform and the ST transform in your multiplication. If this is still an issue, could you provide the code you are using with the matrix transform and we'll continue debugging this. Thanks.
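Roughly what that looks like with vispy transforms (a sketch; the rotation angle and scale factors are placeholders). Because composed transforms apply right-to-left, putting the STTransform on the right means the scale acts along the cube's own axes before the rotation, so it is not sheared into a rhombus:
from vispy.visuals import transforms
rot = transforms.MatrixTransform()
rot.rotate(45, (0, 0, 1)) # placeholder rotation about Z
stretch = transforms.STTransform(scale=(2., 1., 1.)) # scale only along the cube's own X
box.transform = rot * stretch # stretch is applied first, then the rotation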
I am trying to make a histogram from an entered list of integers from 1-10, each repeated some number of times. So, if there are three 6's in the list, the height of that bar should be 60, because 20*3=60.
I am trying to add labels to my bar graph / histogram by writing the numbers in order under the bars, but I am not sure how to do so. Refer to the image below to see what the numbers should look like.
def draw_histogram(t,dataList):
'''draw_histogram(t,dataList) -> None
uses turtle t to draw a histogram of dataList
dataList must contain integers from 0-10'''
t = 0
while t != 11:
if dataList.count(t) == 0:
height = 0
elif dataList.count(t) == 1:
height = 20
elif dataList.count(t) == 2:
height = 40
elif dataList.count(t) == 3:
height = 60
elif dataList.count(t) == 4:
height = 80
elif dataList.count(t) == 5:
height = 100
elif dataList.count(t) == 6:
height = 120
elif dataList.count(t) == 7:
height = 140
elif dataList.count(t) == 8:
height = 160
elif dataList.count(t) == 9:
height = 180
elif dataList.count(t) == 10:
height = 200
bob.forward(height)
bob.right(90)
bob.forward(20)
bob.right(90)
bob.forward(height)
bob.right(90)
bob.forward(20)
bob.left(180)
bob.penup()
bob.forward(35)
bob.left(90)
bob.pendown()
t += 1
return height
# test suite
import turtle
turtle.setup(600,300) # Change the width of the drawing to 600px and the height to 300px.
wn = turtle.Screen()
bob = turtle.Turtle()
dataList = [6,8,0,7,7,9,2,9,10,4,8,7,6,9,1,4,6,7,5,7,2,10,4,5,5,6,8]
# move bob back a little bit so he has room
bob.penup()
bob.back(200)
bob.pendown()
bob.left(90)
# draw the histogram
draw_histogram(bob,dataList)
wn.mainloop()
Click here for what the finished product should look like.
Is this what you are trying to do?
def draw_histogram(t,dataList):
'''draw_histogram(t,dataList) -> None
uses turtle t to draw a histogram of dataList
dataList must contain integers from 0-10'''
t = 0
while t != 11:
if dataList.count(t) == 0:
height = 0
elif dataList.count(t) == 1:
height = 20
elif dataList.count(t) == 2:
height = 40
elif dataList.count(t) == 3:
height = 60
elif dataList.count(t) == 4:
height = 80
elif dataList.count(t) == 5:
height = 100
elif dataList.count(t) == 6:
height = 120
elif dataList.count(t) == 7:
height = 140
elif dataList.count(t) == 8:
height = 160
elif dataList.count(t) == 9:
height = 180
elif dataList.count(t) == 10:
height = 200
bob.right(90)
style = ('Courier', 7, 'italic')
bob.pu()
bob.forward(7)
bob.pd()
bob.write(str(t), font=style, align='center')
bob.pu()
bob.back(7)
bob.pd()
bob.left(90)
bob.forward(height)
bob.right(90)
bob.forward(20)
bob.right(90)
bob.forward(height)
bob.right(90)
bob.forward(20)
bob.left(180)
bob.penup()
bob.forward(35)
bob.left(90)
bob.pendown()
t += 1
return height
# test suite
import turtle
turtle.setup(600,300) # Change the width of the drawing to 600px and the height to 300px.
wn = turtle.Screen()
bob = turtle.Turtle()
dataList = [6,8,0,7,7,9,2,9,10,4,8,7,6,9,1,4,6,7,5,7,2,10,4,5,5,6,8]
# move bob back a little bit so he has room
bob.penup()
bob.back(200)
bob.pendown()
bob.left(90)
# draw the histogram
draw_histogram(bob,dataList)
wn.mainloop()
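As a side note, the if/elif ladder in both versions can be collapsed, since each bar's height is just 20 times the count of that value. A sketch of that simplification, drawing and labeling the bars the same way:
def draw_histogram(t, dataList):
    '''draw_histogram(t,dataList) -> None
    uses turtle t to draw a labeled histogram of dataList
    dataList must contain integers from 0-10'''
    style = ('Courier', 7, 'italic')
    for value in range(11):
        height = 20 * dataList.count(value) # 20 px per occurrence
        # write the label under the bar
        t.right(90)
        t.penup()
        t.forward(7)
        t.pendown()
        t.write(str(value), font=style, align='center')
        t.penup()
        t.back(7)
        t.pendown()
        t.left(90)
        # draw the bar
        t.forward(height)
        t.right(90)
        t.forward(20)
        t.right(90)
        t.forward(height)
        t.right(90)
        t.forward(20)
        t.left(180)
        # step over to the next bar position
        t.penup()
        t.forward(35)
        t.left(90)
        t.pendown()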
I have a circle of radius 10 m. I want to count the number of vehicles entering the circle (i.e., vehicles whose distance from the center is <= 10 m).
Am I doing this right? I can use a Monitor widget to display the count, but the monitor value is much larger than the actual number of vehicles that pass through the circle. I attached the monitor to "total-cars".
How do I count the number of vehicles properly?
ask cars
[
if distancexy 0 0 < 10
[
set total-cars (total-cars + 1)
]
]
I am not very sure about your question, but maybe this code could help you:
set total-cars count cars with [distancexy 0 0 <= 10]
You can use the following code in the monitor control directly:
count cars with [distancexy 0 0 <= 10]
import cv2
import time
bgsMOG = cv2.createBackgroundSubtractorMOG2(detectShadows=False)
kernal=cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(3,3))
cap = cv2.VideoCapture(0)
counter =0
time.sleep(2)
if cap:
while True:
ret, frame = cap.read()
if ret:
#fgmask = bgsMOG.apply(frame, None, 0.01)
blur = cv2.GaussianBlur(frame, (5, 5), 0)
fgmask = bgsMOG.apply(blur)
morhpho = cv2.morphologyEx(fgmask, cv2.MORPH_OPEN, kernal)
#line for detection
cv2.line(frame,(20,270),(320,270),(175,175,0),5)
_,contours, hierarchy = cv2.findContours(morhpho,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
ax1=20 #coordinates of the line where a vehicle is counted when it crosses
ay1=270
ax2=320
ay2=270
try: hierarchy = hierarchy[0]
except: hierarchy = []
#for contour, hier in zip(contours, hierarchy):
for (i, contour) in enumerate(contours):
(x,y,w,h) = cv2.boundingRect(contour)
if w > 20 and h > 25:
rec=cv2.rectangle(frame, (x,y), (x+w,y+h), (180, 0, 0), 1)
x1=w/2 #to find centroid
y1=h/2
cx=x+x1
cy=y+y1
centroid=(cx,cy)
M = cv2.moments(contour)
cX = int(M["m10"] / M["m00"])
cY = int(M["m01"] / M["m00"])
# draw the contour and center of the shape on the image
cv2.circle(frame, (cX, cY), 2, (255, 255, 255), -1)
cv2.circle(frame,(int(cx),int(cy)),1,(0,255,0),-1)
dy=cY-270 #my first code to increase counter
print("centroid",cX,":",cY)
if dy==0:
if (cX<=320)and(cX>=20):
counter=counter+1
print("1st ct",counter)
print(len(contour))
#FileName = "D:/data/" + str(y) + ".jpg"
#cv2.imshow("cropped",rec)
#cv2.imwrite(FileName,rec)
if cy==270:
if centroid > (27, 268) and centroid < (325, 285):
if (cX <= 320) and (cX >= 20):
counter =counter+1
print "counter=", counter
if cY > 10 and cY < 250:
cv2.putText(frame, str(counter),(10,150),cv2.FONT_HERSHEY_SIMPLEX,2, (255, 0, 0), 1, True)
#cv2.resizeWindow('Output',320,180)
cv2.imshow('Output', frame)
cv2.imshow('mor', morhpho)
cv2.imshow('blur', blur)
#cv2.imshow('FGMASK', morhpho)
key = cv2.waitKey(1)
if key == ord('q'):
break
cap.release()
cv2.destroyAllWindows()
Is there an algorithm to find the wavelength of a color given its hue value (between 0 and 360 degrees)? Is there any built-in function in MATLAB for this?
While Mark Ransom and Franco Callari are completely right that you cannot recover the spectrum of a perceptual color, nor unambiguously map hue values to wavelengths, you could definitely piece something together if you just want the corresponding monochromatic wavelength.
The part of the hue cycle between 270 and 360 is another problem. There is nothing corresponding to pink or magenta in the light spectrum, so let's assume that we only use hue values between 0 and 270 degrees.
Estimating that the usable part of the visible spectrum is 400-650nm, with wavelength L (in nm) and hue value H (in degrees), you can improvise this:
L = 650 - 250 / 270 * H
650 is the maximum wavelength, 250 is the wavelength range and 270 is the hue range.
I think this should be in the right direction but there may of course be room for improvement. You might be able to get better results comparing between input hues and corresponding colors on a visible spectrum chart, and then adjusting the values somewhat.
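A minimal Python sketch of that approximation (Python rather than MATLAB, to match the rest of this thread; the 270-degree cutoff comes from the reasoning above):
def hue_to_wavelength(hue_deg):
    """Rough monochromatic-wavelength estimate (nm) for a hue in [0, 270] degrees."""
    if not 0 <= hue_deg <= 270:
        raise ValueError("hues above 270 degrees have no monochromatic equivalent")
    return 650 - 250 / 270 * hue_deg

print(hue_to_wavelength(120)) # pure green comes out at about 539 nm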
I can't provide a simple solution, but there is something you need to consider:
The visible part of the spectrum lies roughly between 380 nm (UV border) and 780 nm (IR border). But what you see (the hue) depends on which cone cells are triggered. Above 660 nm the M-cone is not triggered at all, so everything between 660 nm and 780 nm is hue 0°.
At 580 nm you have yellow with hue 60°, the purest green is at about 535 nm, so that is 120°, and the purest blue (240°) is at about 457 nm.
If you apply a linear function, yellow should be at 597 nm, which it is not, so you'd need a more complex approach.
Above blue, the red cone still gets triggered until we see violet, but we won't reach red again at higher frequencies, so you can't go above approximately 300°.
The hue range between 300° and 360° has no equivalent in the visible spectrum; it can only be simulated by mixing high-frequency light (blue or violet) with red light, which results in something between magenta and red on the purple line.
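If you want a mapping that honours those reference points instead of one linear ramp, piecewise-linear interpolation is a simple option. A sketch in Python; the 300° -> ~380 nm endpoint is my own assumption, combining the ~300° limit with the UV border mentioned above:
import numpy as np

# (hue in degrees, approximate wavelength in nm) from the reference points above
hues = [0, 60, 120, 240, 300]
wavelengths = [660, 580, 535, 457, 380] # the last pair is an assumed endpoint

def hue_to_wavelength_piecewise(hue_deg):
    """Piecewise-linear hue-to-wavelength estimate; only meaningful up to about 300 degrees."""
    if not 0 <= hue_deg <= 300:
        raise ValueError("hues above about 300 degrees have no spectral equivalent")
    return float(np.interp(hue_deg, hues, wavelengths))

print(hue_to_wavelength_piecewise(60)) # -> 580.0 (yellow)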
It is possible to find the dominant wavelength of a color/hue. But as said, most colors aren't monochromatic, and the same color can be constructed from different "mixes" of wavelengths, i.e. metamerism.
Also, for the extra-spectral magenta and violet colors only a complementary wavelength can be specified, i.e. the hue/dominant wavelength that additively mixes to white. The white point must also be specified, since there is no absolute white due to adaptation.
Also, psychologically our perception of hues doesn't follow dominant-wavelength lines. See the Munsell and NCS systems.
Here you can calculate the dominant wavelength from RGB values or different CIE systems: http://www.brucelindbloom.com/index.html?Calc.html
I don't have the formula though.
You can then transform RGB to/from HSL and similar, and to/from Munsell or NCS perceptual hues (NCS values are proprietary, so you have to pay and use their software).
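If you want to compute a dominant wavelength programmatically rather than via that calculator, the third-party colour-science package for Python exposes functions for it. This is only a sketch, under the assumption that the function names sRGB_to_XYZ, XYZ_to_xy and dominant_wavelength match your installed version (check the package docs):
import colour # pip install colour-science

def dominant_wavelength_of_srgb(rgb):
    """Dominant wavelength (nm) of an sRGB triple in [0, 1]; a negative value
    indicates a complementary wavelength for the purple region."""
    xy = colour.XYZ_to_xy(colour.sRGB_to_XYZ(rgb))
    d65 = (0.3127, 0.3290) # white point the hue is measured against
    wl, _, _ = colour.dominant_wavelength(xy, d65)
    return float(wl)

print(dominant_wavelength_of_srgb([0.0, 1.0, 0.0])) # sRGB green, roughly 550 nm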
Short answer: NO. A given hue can in general be produced by a triple infinity of wavelengths.
A "physical color" is a combination of pure spectral colors (in the visible range). In principle there exist infinitely many distinct spectral colors, and so the set of all physical colors may be thought of as an infinite-dimensional vector space (a Hilbert space). This space is typically notated Hcolor. More technically, the space of physical colors may be considered to be the topological cone over the simplex whose vertices are the spectral colors, with white at the centroid of the simplex, black at the apex of the cone, and the monochromatic color associated with any given vertex somewhere along the line from that vertex to the apex depending on its brightness.
. . .
This system implies that for any hue or non-spectral color not on the boundary of the chromaticity diagram, there are infinitely many distinct physical spectra that are all perceived as that hue or color. So, in general there is no such thing as the combination of spectral colors that we perceive as (say) a specific version of tan; instead there are infinitely many possibilities that produce that exact color. The boundary colors that are pure spectral colors can be perceived only in response to light that is purely at the associated wavelength, while the boundary colors on the "line of purples" can each only be generated by a specific ratio of the pure violet and the pure red at the ends of the visible spectral colors.
The CIE chromaticity diagram is horseshoe-shaped, with its curved edge corresponding to all spectral colors (the spectral locus), and the remaining straight edge corresponding to the most saturated purples, mixtures of red and violet.
(Source)
I found this site that converts a given wavelength to a hue. With a bit of work, you could actually reverse the process. It's not ideal, but I trust the author, a consultant in applied mathematics, more than myself on this.
https://www.johndcook.com/wavelength_to_RGB.html
function convert(input) {
var w = parseFloat(input);
if (w >= 380 && w < 440) {
r = -(w - 440) / (440 - 380);
g = 0.0;
b = 1.0;
} else if (w >= 440 && w < 490) {
r = 0.0;
g = (w - 440) / (490 - 440);
b = 1.0;
} else if (w >= 490 && w < 510) {
r = 0.0;
g = 1.0;
b = -(w - 510) / (510 - 490);
} else if (w >= 510 && w < 580) {
r = (w - 510) / (580 - 510);
g = 1.0;
b = 0.0;
} else if (w >= 580 && w < 645) {
r = 1.0;
g = -(w - 645) / (645 - 580);
b = 0.0;
} else if (w >= 645 && w < 781) {
r = 1.0;
g = 0.0;
b = 0.0;
} else {
r = 0.0;
g = 0.0;
b = 0.0;
}
// Let the intensity fall off near the vision limits
if (w >= 380 && w < 420)
factor = 0.3 + 0.7 * (w - 380) / (420 - 380);
else if (w >= 420 && w < 701)
factor = 1.0;
else if (w >= 701 && w < 781)
factor = 0.3 + 0.7 * (780 - w) / (780 - 700);
else
factor = 0.0;
var gamma = 0.80;
var R = (r > 0 ? 255 * Math.pow(r * factor, gamma) : 0);
var G = (g > 0 ? 255 * Math.pow(g * factor, gamma) : 0);
var B = (b > 0 ? 255 * Math.pow(b * factor, gamma) : 0);
return [R, G, B]
}
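To "reverse the process" as suggested, one blunt approach is to port that conversion to Python (keeping the RGB components in 0-1 instead of 0-255) and brute-force search for the wavelength whose hue is closest to the target. A sketch, not a definitive implementation:
import colorsys

def wavelength_to_rgb(w, gamma=0.8):
    """Python port of the JavaScript converter above, returning RGB in [0, 1]."""
    if 380 <= w < 440:
        r, g, b = -(w - 440) / (440 - 380), 0.0, 1.0
    elif 440 <= w < 490:
        r, g, b = 0.0, (w - 440) / (490 - 440), 1.0
    elif 490 <= w < 510:
        r, g, b = 0.0, 1.0, -(w - 510) / (510 - 490)
    elif 510 <= w < 580:
        r, g, b = (w - 510) / (580 - 510), 1.0, 0.0
    elif 580 <= w < 645:
        r, g, b = 1.0, -(w - 645) / (645 - 580), 0.0
    elif 645 <= w < 781:
        r, g, b = 1.0, 0.0, 0.0
    else:
        r, g, b = 0.0, 0.0, 0.0
    # let the intensity fall off near the vision limits
    if 380 <= w < 420:
        factor = 0.3 + 0.7 * (w - 380) / (420 - 380)
    elif 420 <= w < 701:
        factor = 1.0
    elif 701 <= w < 781:
        factor = 0.3 + 0.7 * (780 - w) / (780 - 700)
    else:
        factor = 0.0
    return tuple((c * factor) ** gamma if c > 0 else 0.0 for c in (r, g, b))

def hue_to_wavelength_search(hue_deg):
    """Search 380-700 nm for the wavelength whose hue best matches hue_deg."""
    best_w, best_err = None, float('inf')
    for tenth_nm in range(3800, 7001):
        w = tenth_nm / 10.0
        h, s, v = colorsys.rgb_to_hsv(*wavelength_to_rgb(w))
        err = abs(h * 360 - hue_deg)
        err = min(err, 360 - err) # hue is circular
        if err < best_err:
            best_w, best_err = w, err
    return best_w

print(hue_to_wavelength_search(120)) # pure green maps to roughly 510 nm here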
There's no conversion because they don't overlap.
Hue moves you around an RGB colour space, usually sRGB, which almost all consumer digital equipment uses. That is a subset of the colours our visual system recognises under normal conditions (defined by CIE 1931), and it does not overlap the vibrant line of colours perceived at monochromatic wavelengths of light at all.
Though hues from 0-120 (reddish orange to yellowish green) and near 240 (indigo) are reasonably close, sRGB is quite functional if you don't care about all the washed-out greens and blues. You can fake the violet and red ends of the full spectrum by making them darker, at hues around 270 or 330 respectively; the only place you can't really approximate is around 180, where computer cyan just isn't close at all to the monochromatic vibrant blue-greens.