Skip to content

Demo app fixes #6170

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ extension UIImage {
return UIGraphicsImageRenderer(size: newSize, format: format).image {
_ in draw(in: CGRect(origin: .zero, size: newSize))
}
return image
}

func toRGBArray() -> [UInt8]? {
Expand Down Expand Up @@ -180,33 +179,34 @@ struct ContentView: View {
}
}
.navigationBarTitle(title, displayMode: .inline)
.navigationBarItems(leading:
Button(action: {
showingSettings.toggle()
}) {
Image(systemName: "gearshape")
.imageScale(.large)
})
.navigationBarItems(trailing:
HStack {
Menu {
Section(header: Text("Memory")) {
Text("Used: \(resourceMonitor.usedMemory) Mb")
Text("Available: \(resourceMonitor.availableMemory) Mb")
}
} label: {
Text("\(resourceMonitor.usedMemory) Mb")
}
.onAppear {
resourceMonitor.start()
}
.onDisappear {
resourceMonitor.stop()
}
Button(action: { showingLogs = true }) {
Image(systemName: "list.bullet.rectangle")
}
}
.navigationBarItems(
leading:
Button(action: {
showingSettings.toggle()
}) {
Image(systemName: "gearshape")
.imageScale(.large)
},
trailing:
HStack {
Menu {
Section(header: Text("Memory")) {
Text("Used: \(resourceMonitor.usedMemory) Mb")
Text("Available: \(resourceMonitor.availableMemory) Mb")
}
} label: {
Text("\(resourceMonitor.usedMemory) Mb")
}
.onAppear {
resourceMonitor.start()
}
.onDisappear {
resourceMonitor.stop()
}
Button(action: { showingLogs = true }) {
Image(systemName: "list.bullet.rectangle")
}
}
)
.sheet(isPresented: $showingLogs) {
NavigationView {
Expand Down Expand Up @@ -247,20 +247,24 @@ struct ContentView: View {
isGenerating = true
shouldStopGenerating = false
shouldStopShowingToken = false
let text = prompt
let text = prompt.trimmingCharacters(in: .whitespacesAndNewlines)
let seq_len = 768 // text: 256, vision: 768
let modelPath = resourceManager.modelPath
let tokenizerPath = resourceManager.tokenizerPath
let useLlama = modelPath.range(of: "llama", options: .caseInsensitive) != nil
let useLlama = modelPath.lowercased().contains("llama")

prompt = ""
hideKeyboard()
showingSettings = false

messages.append(Message(text: text))
messages.append(Message(type: useLlama ? .llamagenerated : .llavagenerated))

runnerQueue.async {
defer {
DispatchQueue.main.async {
isGenerating = false
selectedImage = nil
}
}

Expand All @@ -272,10 +276,7 @@ struct ContentView: View {

guard !shouldStopGenerating else { return }
if useLlama {
messages.append(Message(text: text))
messages.append(Message(type: .llamagenerated))

if let runner = runnerHolder.runner, !runner.isloaded() {
if let runner = runnerHolder.runner, !runner.isLoaded() {
var error: Error?
let startLoadTime = Date()
do {
Expand Down Expand Up @@ -305,10 +306,7 @@ struct ContentView: View {
}
}
} else {
messages.append(Message(text: text))
messages.append(Message(type: .llavagenerated))

if let runner = runnerHolder.llavaRunner, !runner.isloaded() {
if let runner = runnerHolder.llavaRunner, !runner.isLoaded() {
var error: Error?
let startLoadTime = Date()
do {
Expand Down Expand Up @@ -367,7 +365,7 @@ struct ContentView: View {
if token != llava_prompt {
if token == "</s>" {
shouldStopGenerating = true
runnerHolder.runner?.stop()
runnerHolder.llavaRunner?.stop()
} else {
tokens.append(token)
if tokens.count > 2 {
Expand All @@ -383,7 +381,7 @@ struct ContentView: View {
}
}
if shouldStopGenerating {
runnerHolder.runner?.stop()
runnerHolder.llavaRunner?.stop()
}
}
}
Expand All @@ -394,7 +392,7 @@ struct ContentView: View {
try runnerHolder.runner?.generate(llama3_prompt, sequenceLength: seq_len) { token in

NSLog(">>> token={\(token)}")
if token != llama3_prompt && !shouldStopShowingToken {
if token != llama3_prompt {
// hack to fix the issue that extension/llm/runner/text_token_generator.h
// keeps generating after <|eot_id|>
if token == "<|eot_id|>" {
Expand Down Expand Up @@ -458,6 +456,7 @@ struct ContentView: View {
}
runnerQueue.async {
runnerHolder.runner = nil
runnerHolder.llavaRunner = nil
}
switch pickerType {
case .model:
Expand Down
Loading