From 5591dc4d1a007c325ebea19286f1761e5c0d22a7 Mon Sep 17 00:00:00 2001
From: Vansh Gandhi
Date: Sun, 14 May 2017 23:38:15 -0700
Subject: [PATCH] Working on UI and UX

---
 Cartfile.resolved                                  |   5 +-
 .../Contents/Resources/DWARF/SwiftProtobuf         | Bin 5788137 -> 5788137 bytes
 .../Versions/A/SwiftProtobuf                       | Bin 2989420 -> 2989420 bytes
 MacAssistant.xcodeproj/project.pbxproj             |   8 ++
 MacAssistant/API.swift                             |  20 ++--
 MacAssistant/AppDelegate.swift                     |  16 ++-
 MacAssistant/AssistantView.xib                     |  83 +++++--------
 MacAssistant/AssistantViewController.swift         | 109 ++++++++----------
 MacAssistant/LoginView.xib                         |   6 +-
 MacAssistant/WaveformView.swift                    |  87 +++++++++++++-
 10 files changed, 203 insertions(+), 131 deletions(-)

diff --git a/Cartfile.resolved b/Cartfile.resolved
index e477dd0..4185b75 100644
--- a/Cartfile.resolved
+++ b/Cartfile.resolved
@@ -1,8 +1,7 @@
 github "Alamofire/Alamofire" "4.4.0"
-github "Clipy/Magnet" "v2.0.0"
-github "OAuthSwift/OAuthSwift" "1.1.1"
+github "Clipy/Magnet" "v2.0.1"
 github "Octree/SwiftyWave" "0.0.2"
 github "SwiftyJSON/SwiftyJSON" "3.1.4"
 github "apple/swift-protobuf" "0.9.901"
 github "audiokit/AudioKit" "v3.6"
-github "grpc/grpc-swift" "b570094bd0ac26b3b41d062f1f219c6119d058a3"
+github "grpc/grpc-swift" "a8a2c1892b80f961dc2225befccfd1da329a0d4b"
diff --git a/Carthage/Build/Mac/SwiftProtobuf.framework.dSYM/Contents/Resources/DWARF/SwiftProtobuf b/Carthage/Build/Mac/SwiftProtobuf.framework.dSYM/Contents/Resources/DWARF/SwiftProtobuf
index 74a2040427fd0c04921f0ed0adbd294b81b7f9a4..f41c72df524749463ef5b9f6ab1765beef418235 100644
GIT binary patch
[base85 binary deltas omitted]
diff --git a/Carthage/Build/Mac/SwiftProtobuf.framework/Versions/A/SwiftProtobuf b/Carthage/Build/Mac/SwiftProtobuf.framework/Versions/A/SwiftProtobuf
index 72ca5e43e20989cda1a6baf176da28f70a2c1ed2..4303654b8da789e4e8273420857e78644af24076 100755
GIT binary patch
[base85 binary deltas omitted]
diff --git a/MacAssistant.xcodeproj/project.pbxproj b/MacAssistant.xcodeproj/project.pbxproj
[pbxproj hunks not recovered]
diff --git a/MacAssistant/API.swift b/MacAssistant/API.swift
--- a/MacAssistant/API.swift
+++ b/MacAssistant/API.swift
@@ ... @@ class API {
-                    if response.audioOut.audioData.count > 0 {
-                        buf.append(response.audioOut.audioData)
-                    }
+                    if !response.result.spokenRequestText.isEmpty {
+                        self.delegate.updateRequestText(response.result.spokenRequestText)
+                    }
+                    self.delegate.updateResponseText(response.result.spokenResponseText.isEmpty
+                        ? "Speaking response..." : response.result.spokenResponseText)
+                    if response.audioOut.audioData.count > 0 { buf.append(response.audioOut.audioData) }
                     if response.eventType == .endOfUtterance { self.delegate.stopListening() }
                 }
                 if let error = error { print("Initial receive error: \(error)") }
             }
 
-    func initiateRequest() {
+    func initiateRequest(volumePercent: Int32) {
         var request = ConverseRequest()
         request.config = ConverseConfig()
@@ -69,7 +68,7 @@ class API {
         var audioOutConfig = AudioOutConfig()
         audioOutConfig.sampleRateHertz = Int32(Constants.GOOGLE_SAMPLE_RATE) // TODO: Play back the response and find the appropriate value
         audioOutConfig.encoding = .mp3
-        audioOutConfig.volumePercentage = 50
+        audioOutConfig.volumePercentage = volumePercent
         request.config.audioOutConfig = audioOutConfig
 
         do {
@@ -92,6 +91,7 @@ class API {
     func doneSpeaking() {
         do {
             try currentCall?.closeSend { print("Closed send") }
+            // Receive all remaining audio responses
            DispatchQueue.global().async {
                 while true {
                     do {
@@ -113,6 +113,12 @@ class API {
         }
     }
 
+    func donePlayingResponse() {
+        if followUp {
+            delegate.startListening()
+        }
+    }
+
     func debugPrint(result: ConverseReponse) {
         print("\n++++++++++++++++++++++++++++++")
         print("Close receive result error: \(result.error.code)")
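Note on the initiateRequest change above: callers now choose the playback volume, and AssistantViewController passes Int32(mic.volume * 100) straight through. Since the mic volume is a 0.0...1.0 gain while volumePercentage takes a percentage, a clamped conversion would be safer. A minimal sketch, assuming the Assistant API expects a value in 0...100 (this helper is illustrative, not part of the patch):

    // Illustrative only; clamp the gain to the unit range before scaling to a percentage.
    func volumePercent(for gain: Double) -> Int32 {
        return Int32((max(0, min(gain, 1)) * 100).rounded())
    }

The call site would then read api.initiateRequest(volumePercent: volumePercent(for: mic.volume)).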
diff --git a/MacAssistant/AppDelegate.swift b/MacAssistant/AppDelegate.swift
index 1447f17..76da7bc 100644
--- a/MacAssistant/AppDelegate.swift
+++ b/MacAssistant/AppDelegate.swift
@@ -27,7 +27,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
         super.init()
         popover.contentViewController = NSViewController(nibName: "LoadingView", bundle: nil)
         // popover.appearance = NSAppearance(named: NSAppearanceNameVibrantDark)
-        registerHotkey()
+//        registerHotkey() // TODO: Proper interaction between hotkey and window
     }
 
     func applicationWillFinishLaunching(_ notification: Notification) {
@@ -65,14 +65,17 @@
                           keyCombo: keyCombo,
                           target: self,
                           action: #selector(AppDelegate.hotkeyPressed))
-        hotKey.register() 
+        hotKey.register()
     }
 
     func hotkeyPressed(sender: AnyObject?) {
-        if (!popover.isShown) {
+        if !popover.isShown {
             showPopover(sender: sender)
+        } else if let controller = popover.contentViewController as? AssistantViewController {
+            if controller.isListening {
+                controller.stopListening()
+            }
         }
-        
         if (isLoggedIn) {
             (popover.contentViewController as? AssistantViewController)?.startListening()
         }
@@ -84,12 +87,15 @@
 
     func showPopover(sender: AnyObject?) {
         if let button = statusItem.button {
-            popover.show(relativeTo: button.bounds, of: button, preferredEdge: NSRectEdge.minY)
+            popover.show(relativeTo: button.bounds, of: button, preferredEdge: .minY)
         }
     }
 
     func closePopover(sender: AnyObject?) {
         popover.performClose(sender)
+        if let controller = popover.contentViewController as? AssistantViewController {
+            controller.stopListening()
+        }
     }
 
     func togglePopover(sender: AnyObject?) {
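Note on the hotkey path: registerHotkey() stays commented out in init pending "proper interaction between hotkey and window". As written above, the unconditional isLoggedIn block runs even on the stop path, so a press while listening stops the request and immediately starts a new one. A sketch of a cleaner toggle, using only names from this patch (illustrative, not the committed logic):

    func hotkeyPressed(sender: AnyObject?) {
        guard isLoggedIn else { return }
        if !popover.isShown {
            // Closed: open the popover and immediately start a voice query.
            showPopover(sender: sender)
            (popover.contentViewController as? AssistantViewController)?.startListening()
        } else if let controller = popover.contentViewController as? AssistantViewController,
                  controller.isListening {
            // Already open and listening: treat the hotkey press as "stop".
            controller.stopListening()
        }
    }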
diff --git a/MacAssistant/AssistantView.xib b/MacAssistant/AssistantView.xib
index a15fcd5..2580a8d 100644
--- a/MacAssistant/AssistantView.xib
+++ b/MacAssistant/AssistantView.xib
[XML hunks not recovered; per the controller diff below, the conversation table view gives way to the spokenTextLabel and speakerButton outlets]
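In the AssistantViewController diff that follows, onTap is simplified: every tap buffer is pushed through the AVAudioConverter `converter` into `outputBuffer`, and the resulting Int16 samples are streamed with api.sendAudio. For context, a sketch of the converter setup that code relies on; the real `converter` and `desiredFormat` properties are defined elsewhere in the file, so the exact flags here (Int16, mono, interleaved) are assumptions based on the int16ChannelData access:

    // Sketch under assumptions; not part of this patch.
    let desiredFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                      sampleRate: Double(Constants.GOOGLE_SAMPLE_RATE),
                                      channels: 1,
                                      interleaved: true)
    let converter = AVAudioConverter(from: AudioKit.engine.inputNode!.inputFormat(forBus: 0),
                                     to: desiredFormat)

One behavioral note: the input block always answers .haveData with the same buffer, which assumes the converter asks for input at most once per convert(to:error:) call; NATIVE_SAMPLES_PER_FRAME and GOOGLE_SAMPLES_PER_FRAME are presumably sized so one input frame yields one output frame.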
diff --git a/MacAssistant/AssistantViewController.swift b/MacAssistant/AssistantViewController.swift
index 0c46eaa..f33aaea 100644
--- a/MacAssistant/AssistantViewController.swift
+++ b/MacAssistant/AssistantViewController.swift
@@ -10,12 +10,13 @@ import Cocoa
 import AudioKit
 import AVFoundation
 
-class AssistantViewController: NSViewController, NSTableViewDelegate, NSTableViewDataSource, ConversationTextDelegate {
+class AssistantViewController: NSViewController, ConversationTextDelegate, AVAudioPlayerDelegate {
 
     @IBOutlet weak var waveformView: CustomPlot!
     @IBOutlet weak var microphoneButton: NSButton!
-    @IBOutlet weak var tableView: NSTableView!
+    @IBOutlet weak var speakerButton: NSButton!
+    @IBOutlet weak var spokenTextLabel: NSTextField!
 
     private var player: AVAudioPlayer?
     private let googleColors = [NSColor.red, NSColor.blue, NSColor.yellow, NSColor.green]
@@ -35,74 +36,40 @@ class AssistantViewController: NSViewController, NSTableViewDelegate, NSTableVie
     private lazy var outputBuffer: AVAudioPCMBuffer = AVAudioPCMBuffer(pcmFormat: self.desiredFormat,
                                                                        frameCapacity: AVAudioFrameCount(Constants.GOOGLE_SAMPLES_PER_FRAME))
 
+    public var isListening: Bool { return AudioKit.engine.isRunning }
+
     override func viewDidLoad() {
         super.viewDidLoad()
-        loadFakeData()
         setupPlot()
-        tableView.delegate = self
-        tableView.dataSource = self
         AudioKit.output = AKBooster(mic, gain: 0)
         AudioKit.engine.inputNode?.installTap(onBus: 0, bufferSize: UInt32(Constants.NATIVE_SAMPLES_PER_FRAME), format: nil, block: onTap)
     }
 
-    private func tableView(_ tableView: NSTableView, viewFor tableColumn: NSTableColumn?, row: Int) -> NSCell? {
-        let convo = conversation[row]
-        let cell = NSTextFieldCell(textCell: convo.text)
-        cell.alignment = convo.fromUser ? .right : .left
-        cell.textColor = NSColor.white
-        print("configuring")
-        return cell
-    }
-
-    // TODO
-//    func tableView(_ tableView: NSTableView, objectValueFor tableColumn: NSTableColumn?, row: Int) -> Any? {
-//        return conversation[row].text
-//        let convo = conversation[row]
-//        if tableColumn?.identifier == "rightColumn" {
-//            if convo.fromUser {
-//                return convo.text
-//            }
-//        }
-//        if tableColumn?.identifier == "leftColumn" {
-//            if !convo.fromUser {
-//                return convo.text
-//            }
-//        }
-//        return nil
-//    }
-
-    func numberOfRows(in tableView: NSTableView) -> Int { return conversation.count }
-
     public func onTap(buffer: AVAudioPCMBuffer, _: AVAudioTime) {
-        if let _ = buffer.floatChannelData {
-            var err: NSError?
-            converter.convert(to: outputBuffer, error: &err) { packetCount, inputStatusPtr in
-                inputStatusPtr.pointee = .haveData
-                return buffer
-            }
-
-            if let error = err {
-                print("Conversion error \(error)")
-            } else {
-                if let data = outputBuffer.int16ChannelData {
-                    self.api.sendAudio(frame: data, withLength: Int(outputBuffer.frameLength))
-                }
-            }
-        }
+        var error: NSError?
+        converter.convert(to: outputBuffer, error: &error) { _, inputStatusPtr in
+            inputStatusPtr.pointee = .haveData
+            return buffer
+        }
+
+        if let error = error { print("Conversion error \(error)") }
+        else if let data = outputBuffer.int16ChannelData {
+            self.api.sendAudio(frame: data, withLength: Int(outputBuffer.frameLength))
+        }
     }
 
     func setupPlot() {
         waveformView.setClickListener(h: buttonAction)
-        plot.shouldFill = false
+        plot.shouldFill = true
         plot.shouldMirror = true
         plot.color = googleColors[0]
         plot.backgroundColor = NSColor.clear
         plot.autoresizingMask = .viewWidthSizable
+        plot.shouldOptimizeForRealtimePlot = true
+        plot.plotType = .buffer
         waveformView.addSubview(plot)
         Timer.scheduledTimer(timeInterval: 0.75, target: self, selector: #selector(self.updatePlotWaveformColor), userInfo: nil, repeats: true);
     }
@@ -115,18 +82,23 @@ class AssistantViewController: NSViewController, NSTableViewDelegate, NSTableVie
     @IBAction func buttonAction(_ sender: Any) {
         if AudioKit.engine.isRunning {
             stopListening()
-        } else {
+        } else if !(player?.isPlaying ?? false) {
             startListening()
         }
     }
 
     func startListening() {
-        api.initiateRequest()
+        api.initiateRequest(volumePercent: Int32(mic.volume * 100))
         AudioKit.start()
+        let file = Bundle.main.url(forResource: "begin_prompt", withExtension: "mp3")!
+        player = try! AVAudioPlayer(contentsOf: file)
+        player!.play()
         DispatchQueue.main.async {
             self.microphoneButton.isHidden = true
             self.plot.isHidden = false
+            self.speakerButton.isHidden = true
         }
+        spokenTextLabel.stringValue = ""
     }
 
     func stopListening() {
@@ -135,33 +107,47 @@ class AssistantViewController: NSViewController, NSTableViewDelegate, NSTableVie
         DispatchQueue.main.async {
             self.microphoneButton.isHidden = false
             self.plot.isHidden = true
+            self.speakerButton.isHidden = true
         }
     }
 
-    // TODO
     func playResponse(_ data: Data) {
         do {
             player = try AVAudioPlayer(data: data, fileTypeHint: AVFileTypeMPEGLayer3)
             player?.play()
-
-        } catch { print("Audio out error \(error):\(error.localizedDescription)") }
+            player?.delegate = self
+            speakerIcon(isShown: true)
+        } catch {
+            print("Audio out error \(error): \(error.localizedDescription)")
+            speakerIcon(isShown: false)
+        }
+    }
+
+    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
+        speakerIcon(isShown: false)
+        api.donePlayingResponse()
+    }
+
+    func audioPlayerDecodeErrorDidOccur(_ player: AVAudioPlayer, error: Error?) {
+        speakerIcon(isShown: false)
+    }
 
-    func loadFakeData() {
-        for i in 0...10 {
-            conversation.append(ConversationEntry(text: "User \(i)", fromUser: true))
-            conversation.append(ConversationEntry(text: "Response \(i)", fromUser: false))
+    func speakerIcon(isShown: Bool) {
+        DispatchQueue.main.async {
+            self.speakerButton.isHidden = !isShown
+            self.microphoneButton.isHidden = isShown
+            self.plot.isHidden = true
         }
     }
 
     func updateRequestText(_ text: String) {
+        print("Request text: \(text)")
+        spokenTextLabel.stringValue = "\"\(text)\""
         conversation.append(ConversationEntry(text: text, fromUser: true))
-        tableView.reloadData()
     }
 
     func updateResponseText(_ text: String) {
         conversation.append(ConversationEntry(text: text, fromUser: false))
-        tableView.reloadData()
     }
 
     @IBAction func gearClicked(_ sender: Any) {
@@ -169,6 +155,7 @@
     }
 
     @IBAction func actionClicked(_ sender: Any) {
+
     }
 
     @IBAction func settingsClicked(_ sender: Any) {
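The startListening/stopListening/speakerIcon methods above each juggle the same three isHidden flags, which encode three mutually exclusive popover states (idle mic, live plot, speaker icon). A possible consolidation, illustrative rather than part of this patch:

    enum AssistantUIState { case idle, listening, speaking }

    func render(_ state: AssistantUIState) {
        DispatchQueue.main.async {
            self.microphoneButton.isHidden = (state != .idle)
            self.plot.isHidden = (state != .listening)
            self.speakerButton.isHidden = (state != .speaking)
        }
    }

startListening would call render(.listening), stopListening would call render(.idle), and speakerIcon(isShown:) collapses into render(isShown ? .speaking : .idle).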
diff --git a/MacAssistant/LoginView.xib b/MacAssistant/LoginView.xib
index ba46fc3..da32f52 100644
--- a/MacAssistant/LoginView.xib
+++ b/MacAssistant/LoginView.xib
[XML hunks not recovered]
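For reference, the WaveformView added below strokes numberOfWaves + 1 copies of one sine curve. Reading the drawing code, the y value at horizontal position x is

    y(x) = scaling(x) * maxAmplitude * normedAmplitude * sin(2 * pi * frequency * x / width + phase) + height / 2

where scaling(x) = 1 - ((x - mid) / mid)^2 is a parabola that peaks at the view's center (mid = width / 2), and normedAmplitude = (1.5 * progress - 0.5) * amplitude with progress = 1 - waveNumber / numberOfWaves, so the per-wave factor falls from 1.0 on the primary wave to -0.5 (a faint, mirrored copy) on the last.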
diff --git a/MacAssistant/WaveformView.swift b/MacAssistant/WaveformView.swift
index 9a63ad8..9238664 100644
--- a/MacAssistant/WaveformView.swift
+++ b/MacAssistant/WaveformView.swift
@@ -6,4 +6,85 @@
 //  Copyright © 2017 vanshgandhi. All rights reserved.
 //
 
-import Foundation
+//
+//  WaveformView.swift
+//  WaveformView
+//
+//  Created by Jonathan on 3/14/15.
+//  Copyright (c) 2015 Underwood. All rights reserved.
+//
+
+import Cocoa
+import Darwin
+
+let pi = Double.pi
+
+public class WaveformView: NSView {
+
+    fileprivate var _phase: CGFloat = 0.0
+    fileprivate var _amplitude: CGFloat = 0.3
+
+    @IBInspectable public var waveColor: NSColor = .black
+    @IBInspectable public var numberOfWaves: Int = 5
+    @IBInspectable public var primaryWaveLineWidth: CGFloat = 3.0
+    @IBInspectable public var secondaryWaveLineWidth: CGFloat = 1.0
+    @IBInspectable public var idleAmplitude: CGFloat = 0.01
+    @IBInspectable public var frequency: CGFloat = 1.25
+    @IBInspectable public var density: CGFloat = 5
+    @IBInspectable public var phaseShift: CGFloat = -0.15
+
+    public var amplitude: CGFloat {
+        return _amplitude
+    }
+
+    public func updateWithLevel(_ level: CGFloat) {
+        _phase += phaseShift
+        _amplitude = fmax(level, idleAmplitude)
+        needsDisplay = true
+    }
+
+    override public func draw(_ dirtyRect: NSRect) {
+        super.draw(dirtyRect)
+
+        let context = NSGraphicsContext.current()!.cgContext
+        // NSView has no backgroundColor; clearing keeps the view transparent.
+        context.clear(bounds)
+
+        // Draw multiple sine waves with equal phase but decreasing amplitude,
+        // each scaled by a parabola that peaks in the middle of the view.
+        for waveNumber in 0...numberOfWaves {
+            context.setLineWidth(waveNumber == 0 ? primaryWaveLineWidth : secondaryWaveLineWidth)
+
+            let halfHeight = bounds.height / 2.0
+            let width = bounds.width
+            let mid = width / 2.0
+
+            let maxAmplitude = halfHeight - 4.0 // leave headroom so the stroke is not clipped
+            // progress runs from 1.0 (primary wave) down to 0.0, so the
+            // amplitude factor below runs from 1.0 down to -0.5.
+            let progress: CGFloat = 1.0 - CGFloat(waveNumber) / CGFloat(numberOfWaves)
+            let normedAmplitude = (1.5 * progress - 0.5) * amplitude
+
+            let multiplier: CGFloat = 1.0 // per-wave alpha multiplier (currently constant)
+            waveColor.withAlphaComponent(multiplier * waveColor.cgColor.alpha).set()
+
+            var x: CGFloat = 0.0
+            while x < width + density {
+                // Parabolic scaling puts the wave's peak in the middle of the view.
+                let scaling = -pow(1 / mid * (x - mid), 2) + 1
+                let angle = 2.0 * CGFloat(pi) * (x / width) * frequency + _phase
+                let y = scaling * maxAmplitude * normedAmplitude * CGFloat(sinf(Float(angle))) + halfHeight
+
+                if x == 0 {
+                    context.move(to: CGPoint(x: x, y: y))
+                } else {
+                    context.addLine(to: CGPoint(x: x, y: y))
+                }
+
+                x += density
+            }
+
+            context.strokePath()
+        }
+    }
+}
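WaveformView is driven by explicit updateWithLevel(_:) calls rather than tapping AudioKit itself. A minimal wiring sketch, assuming an AKAmplitudeTracker is inserted into the existing AudioKit chain (the tracker, timer, and waveView below are illustrative, not part of this patch):

    // Illustrative wiring; `mic` is the existing AKMicrophone in AssistantViewController.
    let waveView = WaveformView()
    let tracker = AKAmplitudeTracker(mic)
    AudioKit.output = AKBooster(tracker, gain: 0) // keep the monitor chain silent, as the app already does
    AudioKit.start()

    // Poll the tracker at roughly 30 fps and drive the waveform.
    Timer.scheduledTimer(withTimeInterval: 1.0 / 30.0, repeats: true) { _ in
        waveView.updateWithLevel(CGFloat(tracker.amplitude))
    }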