Skip to content

Commit

Permalink
Merge branch 'feature/adjust-position-of-context-system-prompt' into …
Browse files Browse the repository at this point in the history
…develop
  • Loading branch information
intitni committed Nov 9, 2023
2 parents 4974b20 + 8330640 commit 6938c9f
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 6 deletions.
5 changes: 3 additions & 2 deletions Core/Sources/ChatService/DynamicContextController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ final class DynamicContextController {
return contexts
}

let extraSystemPrompt = contexts
let contextSystemPrompt = contexts
.map(\.systemPrompt)
.filter { !$0.isEmpty }
.joined(separator: "\n\n")
Expand All @@ -104,9 +104,10 @@ final class DynamicContextController {

let contextualSystemPrompt = """
\(language.isEmpty ? "" : "You must always reply in \(language)")
\(systemPrompt)\(extraSystemPrompt.isEmpty ? "" : "\n\(extraSystemPrompt)")
\(systemPrompt)
"""
await memory.mutateSystemPrompt(contextualSystemPrompt)
await memory.mutateContextSystemPrompt(contextSystemPrompt)
await memory.mutateRetrievedContent(contextPrompts.map(\.content))
functionProvider.append(functions: contexts.flatMap(\.functions))
}
Expand Down
28 changes: 24 additions & 4 deletions Tool/Sources/OpenAIService/Memory/AutoManagedChatGPTMemory.swift
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
public private(set) var remainingTokens: Int?

public var systemPrompt: String
public var contextSystemPrompt: String
public var retrievedContent: [String] = []
public var history: [ChatMessage] = [] {
didSet { onHistoryChange() }
Expand All @@ -27,6 +28,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
functionProvider: ChatGPTFunctionProvider
) {
self.systemPrompt = systemPrompt
contextSystemPrompt = ""
self.configuration = configuration
self.functionProvider = functionProvider
_ = Self.encoder // force pre-initialize
Expand All @@ -40,6 +42,10 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
systemPrompt = newPrompt
}

/// Replaces the stored context-derived system prompt with `newPrompt`.
/// NOTE(review): actor-isolated mutation; per this commit's diff, the value is
/// later sent as a separate `.system` message (inserted at index 1) rather than
/// being appended to the main system prompt — presumably to control token
/// budgeting, see the priority table in the diff. Confirm against full file.
public func mutateContextSystemPrompt(_ newPrompt: String) {
contextSystemPrompt = newPrompt
}

/// Replaces the stored retrieved-content strings with `newContent`.
/// NOTE(review): actor-isolated mutation; empty strings in the array appear to
/// be filtered out later when building the sending history (see the
/// `retrievedContent.filter({ !$0.isEmpty })` hunk in this diff).
public func mutateRetrievedContent(_ newContent: [String]) {
retrievedContent = newContent
}
Expand Down Expand Up @@ -67,6 +73,8 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
/// [Retrieved Content B]
/// [Functions] priority: high
/// [Message History] priority: medium
/// [Context System Prompt] priority: high
/// [Latest Message] priority: high
/// ```
func generateSendingHistory(
maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
Expand All @@ -80,7 +88,12 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
}

var smallestSystemPromptMessage = ChatMessage(role: .system, content: systemPrompt)
var contextSystemPromptMessage = ChatMessage(role: .system, content: contextSystemPrompt)
let smallestSystemMessageTokenCount = countToken(&smallestSystemPromptMessage)
let contextSystemPromptTokenCount = !contextSystemPrompt.isEmpty
? countToken(&contextSystemPromptMessage)
: 0

let functionTokenCount = functionProvider.functions.reduce(into: 0) { partial, function in
var count = encoder.countToken(text: function.name)
+ encoder.countToken(text: function.description)
Expand All @@ -92,6 +105,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
partial += count
}
let mandatoryContentTokensCount = smallestSystemMessageTokenCount
+ contextSystemPromptTokenCount
+ functionTokenCount
+ 3 // every reply is primed with <|start|>assistant<|message|>

Expand Down Expand Up @@ -135,13 +149,13 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
for (index, content) in retrievedContent.filter({ !$0.isEmpty }).enumerated() {
if index == 0 {
if !appendToSystemPrompt("""
## Relevant Content
Below are information related to the conversation, separated by \(separator)
""") { break }
} else {
if !appendToSystemPrompt("\n\(separator)\n") { break }
Expand All @@ -154,16 +168,22 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
let message = ChatMessage(role: .system, content: systemPrompt)
allMessages.append(message)
}

if !contextSystemPrompt.isEmpty {
allMessages.insert(contextSystemPromptMessage, at: 1)
}

#if DEBUG
Logger.service.info("""
Sending tokens count
- system prompt: \(smallestSystemMessageTokenCount)
- context system prompt: \(contextSystemPromptTokenCount)
- functions: \(functionTokenCount)
- messages: \(messageTokenCount)
- retrieved content: \(retrievedContentTokenCount)
- total: \(
smallestSystemMessageTokenCount
+ contextSystemPromptTokenCount
+ functionTokenCount
+ messageTokenCount
+ retrievedContentTokenCount
Expand Down

0 comments on commit 6938c9f

Please sign in to comment.