#import "ViewController.h"
#import "Renderer.h"
#include "imgui.h"

#if TARGET_OS_OSX
#include "imgui_impl_osx.h"
#endif

@interface ViewController ()
@property (nonatomic, readonly) MTKView *mtkView;
@property (nonatomic, strong) Renderer *renderer;
@end

@implementation ViewController

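// The root view is expected to be an MTKView; this accessor exposes it with its concrete type.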
- (MTKView *)mtkView {
    return (MTKView *)self.view;
}

- (void)viewDidLoad
{
    [super viewDidLoad];

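    // Set up the Metal device, hand the view to the renderer, and make the renderer the view's delegate.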
    self.mtkView.device = MTLCreateSystemDefaultDevice();

    if (!self.mtkView.device) {
        NSLog(@"Metal is not supported");
        abort();
    }

    self.renderer = [[Renderer alloc] initWithView:self.mtkView];

    [self.renderer mtkView:self.mtkView drawableSizeWillChange:self.mtkView.bounds.size];

    self.mtkView.delegate = self.renderer;

#if TARGET_OS_OSX
    // Add a tracking area in order to receive mouse events whenever the mouse is within the bounds of our view
    NSTrackingArea *trackingArea = [[NSTrackingArea alloc] initWithRect:NSZeroRect
                                                                options:NSTrackingMouseMoved | NSTrackingInVisibleRect | NSTrackingActiveAlways
                                                                  owner:self
                                                               userInfo:nil];
    [self.view addTrackingArea:trackingArea];

    // If we want to receive key events, we either need to be in the responder chain of the key view,
    // or else we can install a local monitor. The consequence of this heavy-handed approach is that
    // we receive events for all controls, not just Dear ImGui widgets. If we had native controls in our
    // window, we'd want to be much more careful than just ingesting the complete event stream, though we
    // do make an effort to be good citizens by passing along events when Dear ImGui doesn't want to capture.
    NSEventMask eventMask = NSEventMaskKeyDown | NSEventMaskKeyUp | NSEventMaskFlagsChanged | NSEventMaskScrollWheel;
    [NSEvent addLocalMonitorForEventsMatchingMask:eventMask handler:^NSEvent * _Nullable(NSEvent *event) {
        BOOL wantsCapture = ImGui_ImplOSX_HandleEvent(event, self.view);
        if (event.type == NSEventTypeKeyDown && wantsCapture) {
            return nil;
        } else {
            return event;
        }
    }];

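    // With mouse tracking and the key-event monitor in place, initialize the Dear ImGui OSX platform backend.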
    ImGui_ImplOSX_Init();
#endif
}

#if TARGET_OS_OSX

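// Forward AppKit mouse and scroll-wheel events to the Dear ImGui OSX backend so it can track cursor position and button state.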
- (void)mouseMoved:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)mouseDown:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)rightMouseDown:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)otherMouseDown:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)mouseUp:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)rightMouseUp:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)otherMouseUp:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)mouseDragged:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)rightMouseDragged:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)otherMouseDragged:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

- (void)scrollWheel:(NSEvent *)event {
    ImGui_ImplOSX_HandleEvent(event, self.view);
}

#elif TARGET_OS_IOS

// This touch mapping is super cheesy/hacky. We treat any touch on the screen
// as if it were a depressed left mouse button, and we don't bother handling
// multitouch correctly at all. This causes the "cursor" to behave very erratically
// when there are multiple active touches. But for demo purposes, single-touch
// interaction actually works surprisingly well.
- (void)updateIOWithTouchEvent:(UIEvent *)event {
    UITouch *anyTouch = event.allTouches.anyObject;
    CGPoint touchLocation = [anyTouch locationInView:self.view];
    ImGuiIO &io = ImGui::GetIO();
    io.MousePos = ImVec2(touchLocation.x, touchLocation.y);

    BOOL hasActiveTouch = NO;
    for (UITouch *touch in event.allTouches) {
        if (touch.phase != UITouchPhaseEnded && touch.phase != UITouchPhaseCancelled) {
            hasActiveTouch = YES;
            break;
        }
    }
    io.MouseDown[0] = hasActiveTouch;
}

- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    [self updateIOWithTouchEvent:event];
}

- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    [self updateIOWithTouchEvent:event];
}

- (void)touchesCancelled:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    [self updateIOWithTouchEvent:event];
}

- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    [self updateIOWithTouchEvent:event];
}

#endif

@end