# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
This notebook demonstrates obtaining OpenGL context on GPU Colab kernels.
!pip install -q lucid>=0.2.3
!pip install -q moviepy
import numpy as np
import json
import moviepy.editor as mvp
from google.colab import files
import lucid.misc.io.showing as show
from lucid.misc.gl.glcontext import create_opengl_context
# Now it's safe to import OpenGL and EGL functions
import OpenGL.GL as gl
# create_opengl_context() creates a GL context attached to an offscreen
# surface of the given size. Rendering to buffers of other sizes and
# formats remains possible via OpenGL Framebuffer objects.
#
# Users who need more advanced context management are expected to call
# EGL directly.
WIDTH, HEIGHT = 640, 480
create_opengl_context((WIDTH, HEIGHT))

# The OpenGL context is live from this point on — report driver basics.
for prop in (gl.GL_VERSION, gl.GL_VENDOR):
    print(gl.glGetString(prop))
#print(gl.glGetString(gl.GL_EXTENSIONS))
b'4.5.0 NVIDIA 384.111' b'NVIDIA Corporation'
# Let's render something! A classic RGB triangle via legacy
# fixed-function immediate mode.
gl.glClear(gl.GL_COLOR_BUFFER_BIT)
gl.glBegin(gl.GL_TRIANGLES)
for (r, g, b), (x, y) in [
    ((1.0, 0.0, 0.0), (0, 1)),
    ((0.0, 1.0, 0.0), (-1, -1)),
    ((0.0, 0.0, 1.0), (1, -1)),
]:
    gl.glColor3f(r, g, b)
    gl.glVertex2f(x, y)
gl.glEnd()
# Read the pixels back to the CPU, then flip vertically — OpenGL's
# origin is the bottom-left corner, image convention is top-left.
img_buf = gl.glReadPixelsub(0, 0, WIDTH, HEIGHT, gl.GL_RGB, gl.GL_UNSIGNED_BYTE)
img = np.frombuffer(img_buf, np.uint8).reshape(HEIGHT, WIDTH, 3)[::-1]
show.image(img/255.0)
We now have the full power of modern OpenGL in our hands! Let's do something interesting with it!
Fetching the source and rendering the amazing shader by Kali from ShaderToy. You can also substitute a different shader_id, but note that only single-pass shaders that don't use textures are supported by the code below.
shader_id = 'Xtf3Rn' # https://www.shadertoy.com/view/Xtf3Rn
shader_json = !curl -s 'https://www.shadertoy.com/shadertoy' \
-H 'Referer: https://www.shadertoy.com/view/$shader_id' \
--data 's=%7B%20%22shaders%22%20%3A%20%5B%22$shader_id%22%5D%20%7D'
shader_data = json.loads(''.join(shader_json))[0]
assert len(shader_data['renderpass']) == 1, "Only single pass shareds are supported"
assert len(shader_data['renderpass'][0]['inputs']) == 0, "Input channels are not supported"
shader_code = shader_data['renderpass'][0]['code']
from OpenGL.GL import shaders
# Vertices of a full-screen quad (the four clip-space corners), drawn
# later as a 4-vertex GL_TRIANGLE_STRIP.
vertexPositions = np.float32([[-1, -1], [1, -1], [-1, 1], [1, 1]])
# Pass-through vertex shader: forwards the clip-space position and
# derives [0, 1] UV coordinates from it.
VERTEX_SHADER = shaders.compileShader("""
#version 330
layout(location = 0) in vec4 position;
out vec2 UV;
void main()
{
UV = position.xy*0.5+0.5;
gl_Position = position;
}
""", gl.GL_VERTEX_SHADER)
# Wrap the fetched ShaderToy code: declare the inputs ShaderToy normally
# provides (iChannel0, iResolution, iMouse, iTime), splice in the
# downloaded mainImage() source, and invoke it from main().
FRAGMENT_SHADER = shaders.compileShader("""
#version 330
out vec4 outputColor;
in vec2 UV;
uniform sampler2D iChannel0;
uniform vec3 iResolution;
vec4 iMouse = vec4(0);
uniform float iTime = 0.0;
""" + shader_code + """
void main()
{
mainImage(outputColor, UV*iResolution.xy);
}
""", gl.GL_FRAGMENT_SHADER)
# Link both stages into a program and cache the uniform locations that
# render_frame() updates every frame.
shader = shaders.compileProgram(VERTEX_SHADER, FRAGMENT_SHADER)
time_loc = gl.glGetUniformLocation(shader, 'iTime')
res_loc = gl.glGetUniformLocation(shader, 'iResolution')
def render_frame(time):
    """Render the ShaderToy shader at timestamp `time` (seconds).

    Returns a (HEIGHT, WIDTH, 3) uint8 RGB array with the top row
    first; the signature matches what moviepy's VideoClip expects of
    a frame callback.
    """
    gl.glClear(gl.GL_COLOR_BUFFER_BIT)
    with shader:
        # Feed the two uniforms the wrapped ShaderToy code reads.
        gl.glUniform1f(time_loc, time)
        gl.glUniform3f(res_loc, WIDTH, HEIGHT, 1.0)
        # Stream the full-screen quad straight from client memory.
        gl.glEnableVertexAttribArray(0)
        gl.glVertexAttribPointer(0, 2, gl.GL_FLOAT, False, 0, vertexPositions)
        gl.glDrawArrays(gl.GL_TRIANGLE_STRIP, 0, 4)
    # Read the pixels back and flip vertically — OpenGL rows run bottom-up.
    pixels = gl.glReadPixels(0, 0, WIDTH, HEIGHT, gl.GL_RGB, gl.GL_UNSIGNED_BYTE)
    frame = np.frombuffer(pixels, np.uint8).reshape(HEIGHT, WIDTH, 3)
    return frame[::-1]
# Preview a single frame at t = 10 s, scaled to [0, 1] for display.
preview = render_frame(10.0)
show.image(preview / 255.0, format='jpeg')
Use MoviePy to generate a video.
# Render the animation with MoviePy: render_frame is the frame callback
# (time in seconds -> RGB uint8 array), encoded at 60 fps for 10 s,
# then the resulting file is downloaded through the Colab file API.
clip = mvp.VideoClip(render_frame, duration=10.0)
clip.write_videofile('out.mp4', fps=60)
files.download('out.mp4')
[MoviePy] >>>> Building video out.mp4 [MoviePy] Writing video out.mp4
100%|█████████▉| 600/601 [00:16<00:00, 35.86it/s]
[MoviePy] Done. [MoviePy] >>>> Video ready: out.mp4