totte/Program.cs

using OpenTK.Graphics.OpenGL4;
using OpenTK.Mathematics;
using OpenTK.Windowing.Common;
using OpenTK.Windowing.Common.Input;
using OpenTK.Windowing.Desktop;
using OpenTK.Windowing.GraphicsLibraryFramework;
// https://docs.sixlabors.com/api/ImageSharp/SixLabors.ImageSharp.Image.html
using Image = SixLabors.ImageSharp.Image;
using SixLabors.Fonts;
using SixLabors.ImageSharp.Metadata.Profiles.Exif;
using SixLabors.ImageSharp.Metadata.Profiles.Xmp;
using SixLabors.ImageSharp.Drawing.Processing;
using SixLabors.ImageSharp.Drawing;
// These ImageSharp namespaces may be supplied as global usings in the project file; listed
// here so Image<Rgba32>, Rgba32, ImageInfo, Mutate(), RotateMode, etc. resolve in this file.
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
using System;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Xml.Linq;
// Likewise for the System namespaces (List<>, File/Directory, Task) if implicit usings are off.
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
namespace SemiColinGames;
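// Rolling-average FPS over the last 60 frames, based on Environment.TickCount (millisecond resolution).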
public class FpsCounter {
private readonly int[] frameTimes = new int[60];
private double fps = 0;
private int idx = 0;
public int Fps {
get => (int) Math.Ceiling(fps);
}
public void Update() {
var now = Environment.TickCount; // ms
if (frameTimes[idx] != 0) {
var timeElapsed = now - frameTimes[idx];
fps = 1000.0 * frameTimes.Length / timeElapsed;
}
frameTimes[idx] = now;
idx = (idx + 1) % frameTimes.Length;
}
}
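// Native sensor resolutions; UiGeometry uses the active camera's aspect ratio to size the thumbnail ribbon.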
public class CameraInfo {
public readonly Vector2i Resolution;
private CameraInfo(Vector2i resolution) {
Resolution = resolution;
}
public static readonly CameraInfo NIKON_D7000 = new(new Vector2i(4928, 3264));
public static readonly CameraInfo CANON_EOS_R6M2 = new(new Vector2i(6000, 4000));
public static readonly CameraInfo IPHONE_12_MINI = new(new Vector2i(4032, 3024));
}
// FIXME: switch to immediate mode??
// https://gamedev.stackexchange.com/questions/198805/opentk-immediate-mode-on-net-core-doesnt-work
// https://www.youtube.com/watch?v=Q23Kf9QEaO4
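// Wraps the single GL program used for every draw call: the vertex shader applies the
// orthographic projection, and the fragment shader samples texture0 and multiplies it by a tint color.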
public class Shader : IDisposable {
public int Handle;
private bool init = false;
public Shader() {}
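// Requires a current GL context (called from OnLoad). Compiles and links the program,
// then detaches and deletes the intermediate shader objects.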
public void Init() {
init = true;
int VertexShader;
int FragmentShader;
string VertexShaderSource = @"
#version 330
layout(location = 0) in vec3 aPosition;
layout(location = 1) in vec2 aTexCoord;
out vec2 texCoord;
uniform mat4 projection;
void main(void) {
texCoord = aTexCoord;
gl_Position = vec4(aPosition, 1.0) * projection;
}";
string FragmentShaderSource = @"
#version 330
out vec4 outputColor;
in vec2 texCoord;
uniform sampler2D texture0;
uniform vec4 color;
void main() {
outputColor = texture(texture0, texCoord) * color;
}";
VertexShader = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(VertexShader, VertexShaderSource);
FragmentShader = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(FragmentShader, FragmentShaderSource);
GL.CompileShader(VertexShader);
int success;
GL.GetShader(VertexShader, ShaderParameter.CompileStatus, out success);
if (success == 0) {
string infoLog = GL.GetShaderInfoLog(VertexShader);
Console.WriteLine(infoLog);
}
GL.CompileShader(FragmentShader);
GL.GetShader(FragmentShader, ShaderParameter.CompileStatus, out success);
if (success == 0) {
string infoLog = GL.GetShaderInfoLog(FragmentShader);
Console.WriteLine(infoLog);
}
Handle = GL.CreateProgram();
GL.AttachShader(Handle, VertexShader);
GL.AttachShader(Handle, FragmentShader);
GL.LinkProgram(Handle);
GL.GetProgram(Handle, GetProgramParameterName.LinkStatus, out success);
if (success == 0) {
string infoLog = GL.GetProgramInfoLog(Handle);
Console.WriteLine(infoLog);
}
GL.DetachShader(Handle, VertexShader);
GL.DetachShader(Handle, FragmentShader);
GL.DeleteShader(FragmentShader);
GL.DeleteShader(VertexShader);
}
public void Use() {
if (!init) {
Console.WriteLine("Shader.Use(): must call Init() first");
}
GL.UseProgram(Handle);
}
private bool disposedValue = false;
protected virtual void Dispose(bool disposing) {
if (!disposedValue) {
GL.DeleteProgram(Handle);
disposedValue = true;
}
}
~Shader() {
if (disposedValue == false) {
Console.WriteLine("~Shader(): resource leak? Dispose() should be called manually.");
}
}
public void Dispose() {
Dispose(true);
GC.SuppressFinalize(this);
}
public int GetAttribLocation(string name) {
return GL.GetAttribLocation(Handle, name);
}
public int GetUniformLocation(string name) {
return GL.GetUniformLocation(Handle, name);
}
}
// FIXME: this should probably be IDisposable?
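// Metadata (size, EXIF, XMP rating) is read eagerly in the constructor via Image.Identify;
// pixel data is decoded lazily by LoadAsync and uploaded to a GL texture by Texture() on the render thread.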
public class Photo {
public string Filename;
public bool Loaded = false;
public Vector2i Size;
public DateTime DateTimeOriginal;
public string CameraModel = "";
public string LensModel = "";
public string FocalLength = "<unk>";
public string FNumber = "<unk>";
public string ExposureTime = "<unk>";
public string IsoSpeed = "<unk>";
public int Rating = 0;
public ushort Orientation = 1;
private Texture texture;
private Texture placeholder;
private Image<Rgba32>? image = null;
public Photo(string filename, Texture placeholder) {
Filename = filename;
this.placeholder = placeholder;
texture = placeholder;
DateTime creationTime = File.GetCreationTime(filename); // Local time.
DateTimeOriginal = creationTime;
ImageInfo info = Image.Identify(filename);
Size = new(info.Size.Width, info.Size.Height);
ParseExif(info.Metadata.ExifProfile);
TryParseRating(info.Metadata.XmpProfile, out Rating);
}
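// Fire-and-forget (async void): callers can't await completion or observe exceptions;
// Texture() notices when the decoded image becomes available.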
public async void LoadAsync() {
// We don't assign to this.image until Load() is done, because we might
// edit the image due to rotation (etc) and don't want to try generating
// a texture for it until that's already happened.
Image<Rgba32> tmp = await Image.LoadAsync<Rgba32>(Filename);
Util.RotateImageFromExif(tmp, Orientation);
image = tmp;
}
public async void UnloadAsync() {
Loaded = false;
if (texture != placeholder) {
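// Note: this disposes the GL texture from a thread-pool thread; GL calls are normally
// only valid on the thread that owns the context.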
await Task.Run( () => { texture.Dispose(); });
texture = placeholder;
}
}
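// Pulls the xmp:Rating star value out of the photo's XMP packet, if one exists.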
private bool TryParseRating(XmpProfile? xmp, out int rating) {
rating = 0;
if (xmp == null) {
return false;
}
XDocument? doc = xmp.GetDocument();
if (doc == null) {
return false;
}
XElement? root = doc.Root;
if (root == null) {
return false;
}
foreach (XElement elt in root.Descendants()) {
if (elt.Name == "{http://ns.adobe.com/xap/1.0/}Rating") {
if (int.TryParse(elt.Value, out rating)) {
return true;
}
}
}
return false;
}
// Exif (and other image metadata) reference, from the now-defunct Metadata Working Group:
// https://web.archive.org/web/20180919181934/http://www.metadataworkinggroup.org/pdf/mwg_guidance.pdf
//
// Specifically:
//
// In general, date/time metadata is being used to describe the following scenarios:
// * Date/time original specifies when a photo was taken
// * Date/time digitized specifies when an image was digitized
// * Date/time modified specifies when a file was modified by the user
//
// Original Date/Time Creation date of the intellectual content (e.g. the photograph), rather than the creation date of the content being shown
// Exif DateTimeOriginal (36867, 0x9003) and SubSecTimeOriginal (37521, 0x9291)
// IPTC DateCreated (IIM 2:55, 0x0237) and TimeCreated (IIM 2:60, 0x023C)
// XMP (photoshop:DateCreated)
//
// Digitized Date/Time Creation date of the digital representation
// Exif DateTimeDigitized (36868, 0x9004) and SubSecTimeDigitized (37522, 0x9292)
// IPTC DigitalCreationDate (IIM 2:62, 0x023E) and DigitalCreationTime (IIM 2:63, 0x023F)
// XMP (xmp:CreateDate)
//
// Modification Date/Time Modification date of the digital image file
// Exif DateTime (306, 0x132) and SubSecTime (37520, 0x9290)
// XMP (xmp:ModifyDate)
private void ParseExif(ExifProfile? exifs) {
if (exifs == null) {
return;
}
IExifValue<ushort>? orientation;
if (exifs.TryGetValue(ExifTag.Orientation, out orientation)) {
Orientation = orientation.Value;
}
IExifValue<string>? model;
if (exifs.TryGetValue(ExifTag.Model, out model)) {
CameraModel = model.Value ?? "";
}
IExifValue<string>? lensModel;
if (exifs.TryGetValue(ExifTag.LensModel, out lensModel)) {
LensModel = lensModel.Value ?? "";
}
IExifValue<Rational>? focalLength;
if (exifs.TryGetValue(ExifTag.FocalLength, out focalLength)) {
Rational r = focalLength.Value;
FocalLength = $"{r.Numerator / r.Denominator}mm";
}
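// FNumber is an EXIF rational; cameras typically store whole stops with a denominator of 1
// and fractional stops in tenths (e.g. 56/10 -> f/5.6).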
IExifValue<Rational>? fNumber;
if (exifs.TryGetValue(ExifTag.FNumber, out fNumber)) {
Rational r = fNumber.Value;
if (r.Denominator == 1) {
FNumber = $"f/{r.Numerator}";
} else {
if (r.Denominator != 10) {
Console.WriteLine($"*** WARNING: unexpected FNumber denominator: {r.Denominator}");
}
if (r.Numerator % 10 == 0) {
FNumber = $"f/{r.Numerator / 10}";
} else {
FNumber = $"f/{r.Numerator / 10}.{r.Numerator % 10}";
}
}
}
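// ExposureTime is an EXIF rational: fractions of a second (1/N) for fast shutter speeds,
// or whole/tenths of seconds for long exposures, rendered with a trailing " mark.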
IExifValue<Rational>? exposureTime;
if (exifs.TryGetValue(ExifTag.ExposureTime, out exposureTime)) {
Rational r = exposureTime.Value;
if (r.Numerator == 1) {
ExposureTime = $"1/{r.Denominator}";
} else if (r.Numerator == 10) {
ExposureTime = $"1/{r.Denominator / 10}";
} else if (r.Denominator == 1) {
ExposureTime = $"{r.Numerator }\"";
} else if (r.Denominator == 10) {
ExposureTime = $"{r.Numerator / 10}.{r.Numerator % 10}\"";
} else {
Console.WriteLine($"*** WARNING: unexpected ExposureTime: {r.Numerator}/{r.Denominator}");
ExposureTime = r.ToString();
}
}
IExifValue<ushort[]>? isoSpeed;
if (exifs.TryGetValue(ExifTag.ISOSpeedRatings, out isoSpeed)) {
ushort[]? iso = isoSpeed.Value;
if (iso != null) {
if (iso.Length != 1) {
Console.WriteLine($"*** WARNING: unexpected ISOSpeedRatings array length: {iso.Length}");
}
if (iso.Length >= 1) {
IsoSpeed = $"ISO {iso[0]}";
}
}
}
// FIXME: I think the iPhone stores time in UTC but other cameras report it in local time.
IExifValue<string>? dateTimeOriginal;
if (exifs.TryGetValue(ExifTag.DateTimeOriginal, out dateTimeOriginal)) {
DateTime date;
if (DateTime.TryParseExact(
dateTimeOriginal.Value ?? "",
"yyyy:MM:dd HH:mm:ss",
System.Globalization.CultureInfo.InvariantCulture,
System.Globalization.DateTimeStyles.AssumeLocal,
out date)) {
DateTimeOriginal = date;
} else {
Console.WriteLine($"*** WARNING: unexpected DateTimeOriginal value: {dateTimeOriginal.Value}");
}
}
}
public Texture Texture() {
if (texture == placeholder && image != null) {
// The texture needs to be created on the GL thread, so we instantiate
// it here (since this is called from OnRenderFrame), as long as the
// image is ready to go.
texture = new Texture(image);
image.Dispose();
image = null;
Loaded = true;
}
return texture;
}
public string Description() {
string date = DateTimeOriginal.ToString("yyyy-MM-dd HH:mm:ss");
string shootingInfo = $"{date} {FocalLength}, {FNumber} at {ExposureTime}, {IsoSpeed}";
return String.Format("{0,-60} {1,-50} {2}", shootingInfo, $"{CameraModel} {LensModel}", Filename);
}
}
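// Uploads an ImageSharp RGBA image into an OpenGL 2D texture. Construction (and Dispose)
// must happen on the thread that owns the GL context.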
public class Texture : IDisposable {
public int Handle;
public Vector2i Size;
public Texture(Image<Rgba32> image) {
Size = new Vector2i(image.Width, image.Height);
byte[] pixelBytes = new byte[Size.X * Size.Y * Unsafe.SizeOf<Rgba32>()];
image.CopyPixelDataTo(pixelBytes);
Handle = GL.GenTexture();
if (Handle > maxHandle) {
// Console.WriteLine("GL.GenTexture #" + Handle);
maxHandle = Handle;
}
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, Handle);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat.Rgba, Size.X, Size.Y, 0, PixelFormat.Rgba, PixelType.UnsignedByte, pixelBytes);
//GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int) TextureMinFilter.LinearMipmapLinear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int) TextureMinFilter.Linear);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int) TextureMagFilter.Nearest);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int) TextureWrapMode.ClampToBorder);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int) TextureWrapMode.ClampToBorder);
float[] borderColor = { 0.0f, 0.0f, 0.0f, 1.0f };
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureBorderColor, borderColor);
// FIXME: should we use mipmaps?
//GL.GenerateMipmap(GenerateMipmapTarget.Texture2D);
}
private static int maxHandle = -1;
private bool disposedValue = false;
protected virtual void Dispose(bool disposing) {
if (!disposedValue) {
GL.DeleteTexture(Handle);
disposedValue = true;
}
}
~Texture() {
if (!disposedValue) {
Console.WriteLine("~Texture(): resource leak? Dispose() should be called manually.");
}
}
public void Dispose() {
Dispose(true);
GC.SuppressFinalize(this);
}
}
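// Window layout: the main photo area, a status bar along the bottom, and a vertical
// ribbon of 20 thumbnails down the right edge.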
public class UiGeometry {
public static Vector2i MIN_WINDOW_SIZE = new(640, 480);
private static CameraInfo activeCamera = CameraInfo.CANON_EOS_R6M2;
public readonly Vector2i WindowSize;
public readonly Box2i ThumbnailBox;
public readonly List<Box2i> ThumbnailBoxes = new();
public readonly Box2i PhotoBox;
public readonly Box2i StatusBox;
public UiGeometry() : this(MIN_WINDOW_SIZE) {}
public UiGeometry(Vector2i windowSize) {
WindowSize = windowSize;
int numThumbnails = 20;
int thumbnailHeight = WindowSize.Y / numThumbnails;
int thumbnailWidth = (int) (1.0 * thumbnailHeight * activeCamera.Resolution.X / activeCamera.Resolution.Y);
for (int i = 0; i < numThumbnails; i++) {
Box2i box = Util.MakeBox(WindowSize.X - thumbnailWidth, i * thumbnailHeight, thumbnailWidth, thumbnailHeight);
ThumbnailBoxes.Add(box);
}
int statusBoxHeight = 20;
int statusBoxPadding = 4;
PhotoBox = new Box2i(0, 0, WindowSize.X - thumbnailWidth, WindowSize.Y - statusBoxHeight - statusBoxPadding);
StatusBox = new Box2i(0, WindowSize.Y - statusBoxHeight, WindowSize.X - thumbnailWidth, WindowSize.Y);
ThumbnailBox = new Box2i(ThumbnailBoxes[0].Min.X, ThumbnailBoxes[0].Min.Y, WindowSize.X, WindowSize.Y);
}
}
public static class Util {
public const float PI = (float) Math.PI;
public static Box2i MakeBox(int left, int top, int width, int height) {
return new Box2i(left, top, left + width, top + height);
}
public static Image<Rgba32> MakeImage(float width, float height) {
return new((int) Math.Ceiling(width), (int) Math.Ceiling(height));
}
// https://sirv.com/help/articles/rotate-photos-to-be-upright/
public static void RotateImageFromExif(Image<Rgba32> image, ushort orientation) {
if (orientation <= 1) {
return;
}
// FIXME: I'm not convinced that all of these are correct, especially the
// cases that involve flipping (because whether you flip before or after
// rotating matters).
var operations = new Dictionary<ushort, (RotateMode, FlipMode)> {
{ 2, (RotateMode.None, FlipMode.Horizontal) },
{ 3, (RotateMode.Rotate180, FlipMode.None) },
{ 4, (RotateMode.None, FlipMode.Vertical) },
{ 5, (RotateMode.Rotate90, FlipMode.Vertical) },
{ 6, (RotateMode.Rotate90, FlipMode.None) },
{ 7, (RotateMode.Rotate270, FlipMode.Vertical) },
{ 8, (RotateMode.Rotate270, FlipMode.None) },
};
var (rotate, flip) = operations[orientation];
image.Mutate(x => x.RotateFlip(rotate, flip));
}
public static Texture RenderText(string text) {
return RenderText(text, 16);
}
public static Texture RenderText(string text, int size) {
Font font = SystemFonts.CreateFont("Consolas", size, FontStyle.Bold);
TextOptions options = new(font);
FontRectangle rect = TextMeasurer.Measure(text, options);
Image<Rgba32> image = MakeImage(rect.Width, rect.Height);
IBrush brush = Brushes.Solid(Color.White);
// IPen pen = Pens.Solid(Color.Black, 1f);
// image.Mutate(x => x.DrawText(options, text, brush, pen));
image.Mutate(x => x.DrawText(options, text, brush));
Texture texture = new Texture(image);
image.Dispose();
return texture;
}
public static OpenTK.Windowing.Common.Input.Image[] RenderAppIcon() {
int size = 64;
Font font = SystemFonts.CreateFont("MS PMincho", size, FontStyle.Bold);
TextOptions options = new(font);
Image<Rgba32> image = MakeImage(size, size);
IBrush brush = Brushes.Solid(Color.Black);
image.Mutate(x => x.DrawText(options, "撮", brush));
byte[] pixelBytes = new byte[size * size * 4];
image.CopyPixelDataTo(pixelBytes);
image.Dispose();
OpenTK.Windowing.Common.Input.Image opentkImage = new(size, size, pixelBytes);
return new OpenTK.Windowing.Common.Input.Image[]{ opentkImage };
}
public static Texture RenderStar(float radius) {
IPath path = new Star(x: radius, y: radius, prongs: 5, innerRadii: radius * 0.4f, outerRadii: radius, angle: Util.PI);
Image<Rgba32> image = MakeImage(path.Bounds.Width, path.Bounds.Height);
image.Mutate(x => x.Fill(Color.White, path));
Texture texture = new Texture(image);
image.Dispose();
return texture;
}
}
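// Main application window: handles keyboard/mouse input, keeps a sliding window of loaded
// photos around the current index, and renders everything as textured quads.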
public class Game : GameWindow {
public Game(GameWindowSettings gwSettings, NativeWindowSettings nwSettings) : base(gwSettings, nwSettings) {}
private static Texture TEXTURE_WHITE = new(new Image<Rgba32>(1, 1, new Rgba32(255, 255, 255)));
private static Texture TEXTURE_BLACK = new(new Image<Rgba32>(1, 1, new Rgba32(0, 0, 0)));
UiGeometry geometry = new();
FpsCounter fpsCounter = new();
// Input handling.
long downTimer = Int64.MaxValue;
long upTimer = Int64.MaxValue;
// Four points, each consisting of (x, y, z, tex_x, tex_y).
float[] vertices = new float[20];
// Indices to draw a rectangle from two triangles.
uint[] indices = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
int VertexBufferObject;
int ElementBufferObject;
int VertexArrayObject;
List<Photo> photos = new();
HashSet<int> loadedImages = new();
int photoIndex = 0;
int ribbonIndex = 0;
Shader shader = new();
Matrix4 projection;
float zoomLevel = 0f;
protected override void OnUpdateFrame(FrameEventArgs e) {
base.OnUpdateFrame(e);
long now = DateTime.Now.Ticks;
KeyboardState input = KeyboardState;
// FIXME: add a confirm dialog before closing. (Also for the window-close button.)
// Close when Escape is pressed.
if (input.IsKeyDown(Keys.Escape)) {
Close();
}
// Track keyboard repeat times for advancing up/down.
if (!input.IsKeyDown(Keys.Down)) {
downTimer = Int64.MaxValue;
}
if (!input.IsKeyDown(Keys.Up)) {
upTimer = Int64.MaxValue;
}
// Look for mouse clicks on thumbnails.
//
// Note that we don't bounds-check photoIndex until after all the possible
// inputs that might affect it. That simplifies this logic significantly.
if (MouseState.IsButtonPressed(MouseButton.Button1)) {
for (int i = 0; i < geometry.ThumbnailBoxes.Count; i++) {
Box2i box = geometry.ThumbnailBoxes[i];
if (box.ContainsInclusive((Vector2i) MouseState.Position)) {
photoIndex = ribbonIndex + i;
}
}
}
if (MouseState.IsButtonPressed(MouseButton.Button4)) {
photoIndex--;
}
if (MouseState.IsButtonPressed(MouseButton.Button5)) {
photoIndex++;
}
if (MouseState.ScrollDelta.Y < 0) {
photoIndex++;
}
if (MouseState.ScrollDelta.Y > 0) {
photoIndex--;
}
// FIXME: make a proper Model class for tracking the state of the controls?
if (input.IsKeyPressed(Keys.Down) || now > downTimer) {
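// Auto-repeat: schedule the next advance 200 ms from now (DateTime ticks are 100 ns, so 10,000 per millisecond).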
downTimer = now + 10000 * 200;
photoIndex++;
}
if (input.IsKeyPressed(Keys.Up) || now > upTimer) {
upTimer = now + 10000 * 200;
photoIndex--;
}
if (input.IsKeyPressed(Keys.Home)) {
photoIndex = 0;
}
if (input.IsKeyPressed(Keys.End)) {
photoIndex = photos.Count - 1;
}
if (input.IsKeyPressed(Keys.PageDown)) {
photoIndex += 10;
}
if (input.IsKeyPressed(Keys.PageUp)) {
photoIndex -= 10;
}
// Make sure the photoIndex is actually valid.
photoIndex = Math.Clamp(photoIndex, 0, photos.Count - 1);
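// Number keys set the zoom: 0 (or the backtick key) fits the photo to the window; 1-5 zoom to 1x, 2x, 4x, 8x, and 16x.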
if (input.IsKeyDown(Keys.D0) || input.IsKeyDown(Keys.GraveAccent)) {
zoomLevel = 0f;
}
if (input.IsKeyDown(Keys.D1)) {
zoomLevel = 1f;
}
if (input.IsKeyDown(Keys.D2)) {
zoomLevel = 2f;
}
if (input.IsKeyDown(Keys.D3)) {
zoomLevel = 4f;
}
if (input.IsKeyDown(Keys.D4)) {
zoomLevel = 8f;
}
if (input.IsKeyDown(Keys.D5)) {
zoomLevel = 16f;
}
}
protected override void OnLoad() {
base.OnLoad();
GL.ClearColor(0f, 0f, 0f, 1f);
GL.Enable(EnableCap.Blend);
GL.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha);
VertexArrayObject = GL.GenVertexArray();
GL.BindVertexArray(VertexArrayObject);
VertexBufferObject = GL.GenBuffer();
ElementBufferObject = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ArrayBuffer, VertexBufferObject);
GL.BufferData(BufferTarget.ArrayBuffer, vertices.Length * sizeof(float), vertices, BufferUsageHint.DynamicDraw);
GL.BindBuffer(BufferTarget.ElementArrayBuffer, ElementBufferObject);
GL.BufferData(BufferTarget.ElementArrayBuffer, indices.Length * sizeof(uint), indices, BufferUsageHint.DynamicDraw);
shader.Init();
shader.Use();
// Because there are 5 floats between the start of one vertex and the start of the next,
// the stride is 5 * sizeof(float). Position data starts at offset 0.
var vertexLocation = shader.GetAttribLocation("aPosition");
GL.EnableVertexAttribArray(vertexLocation);
GL.VertexAttribPointer(vertexLocation, 3, VertexAttribPointerType.Float, false, 5 * sizeof(float), 0);
// Next, we also set up texture coordinates. It works in much the same way.
// We add an offset of 3 floats, since the texture coordinates come after the position data.
// We also change the component count to 2, because there are only 2 floats per texture coordinate.
var texCoordLocation = shader.GetAttribLocation("aTexCoord");
GL.EnableVertexAttribArray(texCoordLocation);
GL.VertexAttribPointer(texCoordLocation, 2, VertexAttribPointerType.Float, false, 5 * sizeof(float), 3 * sizeof(float));
// Load photos from a directory.
// string[] files = Directory.GetFiles(@"c:\users\colin\desktop\photos-test\");
// string[] files = Directory.GetFiles(@"c:\users\colin\pictures\photos\2023\07\14\");
// string[] files = Directory.GetFiles(@"G:\DCIM\100EOSR6\");
// string[] files = Directory.GetFiles(@"C:\Users\colin\Pictures\photos\2018\06\23");
string[] files = Directory.GetFiles(@"C:\Users\colin\Desktop\Germany all\105D7000");
// string[] files = Directory.GetFiles(@"C:\Users\colin\Desktop\many-birds\");
for (int i = 0; i < files.Length; i++) {
string file = files[i];
if (file.ToLower().EndsWith(".jpg")) {
Photo photo = new Photo(file, TEXTURE_BLACK);
photos.Add(photo);
}
}
photos.Sort(ComparePhotosByDate);
}
private static int ComparePhotosByDate(Photo x, Photo y) {
int compare = x.DateTimeOriginal.CompareTo(y.DateTimeOriginal);
if (compare != 0) {
return compare;
}
// If the photos have the same seconds value, sort by filename
// (since cameras usually increment the filename for successive shots).
return x.Filename.CompareTo(y.Filename);
}
protected override void OnUnload() {
base.OnUnload();
}
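// Keeps a sliding window of decoded photos around photoIndex: starts (fire-and-forget) loads
// for indices within +/-20 and unloads anything loaded that has drifted outside +/-40.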
private async void LoadAndUnloadImagesAsync() {
int minUnloadedImage = Math.Max(0, photoIndex - 40);
int maxUnloadedImage = Math.Min(photoIndex + 40, photos.Count - 1);
int minLoadedImage = Math.Max(0, photoIndex - 20);
int maxLoadedImage = Math.Min(photoIndex + 20, photos.Count - 1);
// First, unload images that are far outside our window.
// FIXME: also cancel any in-progress loading tasks that have moved outside our window.
// FIXME: maybe use an LRU cache for evicting images?
// FIXME: keep around thumbnail-sized textures?
// Snapshot the set first, since we remove entries from it while iterating.
foreach (int i in new List<int>(loadedImages)) {
if (i < minUnloadedImage || i > maxUnloadedImage) {
// Console.WriteLine("unloading " + i);
loadedImages.Remove(i);
photos[i].UnloadAsync();
}
}
// Then, start loading any images that aren't in our window.
for (int i = minLoadedImage; i <= maxLoadedImage; i++) {
if (!loadedImages.Contains(i)) {
// Console.WriteLine("loading " + i);
loadedImages.Add(i);
await Task.Run( () => { photos[i].LoadAsync(); });
}
}
}
protected override void OnRenderFrame(FrameEventArgs e) {
base.OnRenderFrame(e);
fpsCounter.Update();
LoadAndUnloadImagesAsync();
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.BindBuffer(BufferTarget.ArrayBuffer, VertexBufferObject);
GL.ActiveTexture(TextureUnit.Texture0);
Photo activePhoto = photos[photoIndex];
Texture active = activePhoto.Texture();
// FIXME: make a function for scaling & centering one box on another.
float scaleX = 1f * geometry.PhotoBox.Size.X / active.Size.X;
float scaleY = 1f * geometry.PhotoBox.Size.Y / active.Size.Y;
float scale = Math.Min(scaleX, scaleY);
if (zoomLevel > 0f) {
scale = zoomLevel;
}
Vector2i renderSize = (Vector2i) (((Vector2) active.Size) * scale);
Vector2i center = (Vector2i) geometry.PhotoBox.Center;
Box2i photoBox = Util.MakeBox(center.X - renderSize.X / 2, center.Y - renderSize.Y / 2, renderSize.X, renderSize.Y);
DrawTexture(active, photoBox);
// Draw thumbnail boxes.
ribbonIndex = Math.Clamp(photoIndex - (geometry.ThumbnailBoxes.Count - 1) / 2, 0, Math.Max(0, photos.Count - geometry.ThumbnailBoxes.Count));
DrawFilledBox(geometry.ThumbnailBox, Color4.Black);
for (int i = 0; i < geometry.ThumbnailBoxes.Count; i++) {
if (ribbonIndex + i >= photos.Count) {
break;
}
Box2i box = geometry.ThumbnailBoxes[i];
DrawTexture(photos[ribbonIndex + i].Texture(), box);
if (ribbonIndex + i == photoIndex) {
DrawBox(box, 5, Color4.Black);
DrawBox(box, 3, Color4.White);
}
}
// Draw status box.
DrawFilledBox(geometry.StatusBox, Color4.Black);
DrawText(activePhoto.Description(), geometry.StatusBox.Min.X + 80, geometry.StatusBox.Min.Y);
DrawText($" FPS: {fpsCounter.Fps}", geometry.StatusBox.Max.X - 76, geometry.StatusBox.Min.Y);
if (activePhoto.Loaded) {
DrawText($"{(scale * 100):F1}%", geometry.StatusBox.Min.X, geometry.StatusBox.Min.Y);
}
SwapBuffers();
}
void DrawTexture(Texture texture, Box2i box) {
DrawTexture(texture, box, Color4.White);
}
void DrawTexture(Texture texture, Box2i box, Color4 color) {
GL.Uniform4(shader.GetUniformLocation("color"), color);
SetVertices(box.Min.X, box.Min.Y, box.Size.X, box.Size.Y);
GL.BufferData(BufferTarget.ArrayBuffer, vertices.Length * sizeof(float), vertices, BufferUsageHint.DynamicDraw);
GL.BindTexture(TextureTarget.Texture2D, texture.Handle);
GL.DrawElements(PrimitiveType.Triangles, indices.Length, DrawElementsType.UnsignedInt, 0);
}
void DrawBox(Box2i box, int thickness, Color4 color) {
DrawTexture(TEXTURE_WHITE, Util.MakeBox(box.Min.X, box.Min.Y, box.Size.X, thickness), color);
DrawTexture(TEXTURE_WHITE, Util.MakeBox(box.Min.X, box.Min.Y, thickness, box.Size.Y), color);
DrawTexture(TEXTURE_WHITE, Util.MakeBox(box.Min.X, box.Max.Y - thickness, box.Size.X, thickness), color);
DrawTexture(TEXTURE_WHITE, Util.MakeBox(box.Max.X - thickness, box.Min.Y, thickness, box.Size.Y), color);
}
void DrawFilledBox(Box2i box, Color4 color) {
DrawTexture(TEXTURE_WHITE, Util.MakeBox(box.Min.X, box.Min.Y, box.Size.X, box.Size.Y), color);
}
void DrawText(string text, int x, int y) {
Texture label = Util.RenderText(text);
DrawTexture(label, Util.MakeBox(x, y, label.Size.X, label.Size.Y));
label.Dispose();
}
protected override void OnResize(ResizeEventArgs e) {
base.OnResize(e);
Console.WriteLine($"OnResize: {e.Width}x{e.Height}");
geometry = new UiGeometry(e.Size);
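// Orthographic projection in window pixel coordinates: origin at the top-left, y increasing downward.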
projection = Matrix4.CreateOrthographicOffCenter(0f, e.Width, e.Height, 0f, -1f, 1f);
GL.UniformMatrix4(shader.GetUniformLocation("projection"), true, ref projection);
GL.Viewport(0, 0, e.Width, e.Height);
}
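// Fills the quad's four corners as (x, y, z, u, v) tuples, matching the 5-float vertex layout configured in OnLoad.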
private void SetVertices(float left, float top, float width, float height) {
// top left
vertices[0] = left;
vertices[1] = top;
vertices[2] = 0f;
vertices[3] = 0f;
vertices[4] = 0f;
// top right
vertices[5] = left + width;
vertices[6] = top;
vertices[7] = 0f;
vertices[8] = 1f;
vertices[9] = 0f;
// bottom right
vertices[10] = left + width;
vertices[11] = top + height;
vertices[12] = 0f;
vertices[13] = 1f;
vertices[14] = 1f;
// bottom left
vertices[15] = left;
vertices[16] = top + height;
vertices[17] = 0f;
vertices[18] = 0f;
vertices[19] = 1f;
}
}
static class Program {
static void Main(string[] args) {
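// Open the window on the monitor with the most pixels, sized to its work area
// (with small offsets for the window frame and title bar).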
List<MonitorInfo> monitors = Monitors.GetMonitors();
MonitorInfo bestMonitor = monitors[0];
int bestResolution = bestMonitor.HorizontalResolution * bestMonitor.VerticalResolution;
for (int i = 1; i < monitors.Count; i++) {
MonitorInfo monitor = monitors[i];
int resolution = monitor.HorizontalResolution * monitor.VerticalResolution;
if (resolution > bestResolution) {
bestResolution = resolution;
bestMonitor = monitor;
}
}
Console.WriteLine($"best monitor: {bestMonitor.HorizontalResolution}x{bestMonitor.VerticalResolution}");
GameWindowSettings gwSettings = new();
gwSettings.UpdateFrequency = 30.0;
gwSettings.RenderFrequency = 30.0;
NativeWindowSettings nwSettings = new();
nwSettings.WindowState = WindowState.Normal;
nwSettings.CurrentMonitor = bestMonitor.Handle;
nwSettings.Location = new Vector2i(bestMonitor.WorkArea.Min.X + 1, bestMonitor.WorkArea.Min.Y + 31);
nwSettings.Size = new Vector2i(bestMonitor.WorkArea.Size.X - 2, bestMonitor.WorkArea.Size.Y - 32);
nwSettings.MinimumSize = UiGeometry.MIN_WINDOW_SIZE;
nwSettings.Title = "Totte";
nwSettings.IsEventDriven = false;
nwSettings.Icon = new WindowIcon(Util.RenderAppIcon());
using (Game game = new(gwSettings, nwSettings)) {
game.Run();
}
}
}