Skip to content

Commit ea06920

Browse files
authored
fix: AI chat freeze, performance, code quality (#774) (#796)
* fix: cap query/results size, move buildSystemPrompt off MainActor (#774) * fix: AI code highlighting dedup, inline suggestion batching, scroll anchor, size check * fix: AI maxOutputTokens config, Gemini fallback models, conversation cap, minor cleanup
1 parent 1dda27d commit ea06920

16 files changed

+237
-272
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
2626

2727
### Fixed
2828

29+
- AI chat freeze when large queries or results are included in the system prompt (#774)
30+
- AI chat panel not updating when switching database connections
2931
- Schema restored on reconnect for PostgreSQL, Redshift, and BigQuery (#777)
3032
- Database restored after auto-reconnect (was lost when connection dropped)
3133
- Redis database selection persisted across sessions

TablePro/Core/AI/AIPromptTemplates.swift

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,19 +12,33 @@ enum AIPromptTemplates {
1212
/// Build a prompt asking AI to explain a query
1313
@MainActor static func explainQuery(_ query: String, databaseType: DatabaseType = .mysql) -> String {
1414
let (typeName, lang) = queryInfo(for: databaseType)
15-
return "Explain this \(typeName):\n\n```\(lang)\n\(query)\n```"
15+
return explainQuery(query, typeName: typeName, language: lang)
1616
}
1717

1818
/// Build a prompt asking AI to optimize a query
1919
@MainActor static func optimizeQuery(_ query: String, databaseType: DatabaseType = .mysql) -> String {
2020
let (typeName, lang) = queryInfo(for: databaseType)
21-
return "Optimize this \(typeName) for better performance:\n\n```\(lang)\n\(query)\n```"
21+
return optimizeQuery(query, typeName: typeName, language: lang)
2222
}
2323

2424
/// Build a prompt asking AI to fix a query that produced an error
2525
@MainActor static func fixError(query: String, error: String, databaseType: DatabaseType = .mysql) -> String {
2626
let (typeName, lang) = queryInfo(for: databaseType)
27-
return "This \(typeName) failed with an error. Please fix it.\n\nQuery:\n```\(lang)\n\(query)\n```\n\nError: \(error)"
27+
return fixError(query: query, error: error, typeName: typeName, language: lang)
28+
}
29+
30+
// MARK: - Non-isolated overloads
31+
32+
static func explainQuery(_ query: String, typeName: String, language: String) -> String {
33+
"Explain this \(typeName):\n\n```\(language)\n\(query)\n```"
34+
}
35+
36+
static func optimizeQuery(_ query: String, typeName: String, language: String) -> String {
37+
"Optimize this \(typeName) for better performance:\n\n```\(language)\n\(query)\n```"
38+
}
39+
40+
static func fixError(query: String, error: String, typeName: String, language: String) -> String {
41+
"This \(typeName) failed with an error. Please fix it.\n\nQuery:\n```\(language)\n\(query)\n```\n\nError: \(error)"
2842
}
2943

3044
@MainActor private static func queryInfo(for databaseType: DatabaseType) -> (typeName: String, language: String) {

TablePro/Core/AI/AIProviderFactory.swift

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,18 +36,21 @@ enum AIProviderFactory {
3636
case .claude:
3737
provider = AnthropicProvider(
3838
endpoint: config.endpoint,
39-
apiKey: apiKey ?? ""
39+
apiKey: apiKey ?? "",
40+
maxOutputTokens: config.maxOutputTokens ?? 4_096
4041
)
4142
case .gemini:
4243
provider = GeminiProvider(
4344
endpoint: config.endpoint,
44-
apiKey: apiKey ?? ""
45+
apiKey: apiKey ?? "",
46+
maxOutputTokens: config.maxOutputTokens ?? 8_192
4547
)
4648
case .openAI, .openRouter, .ollama, .custom:
4749
provider = OpenAICompatibleProvider(
4850
endpoint: config.endpoint,
4951
apiKey: apiKey,
50-
providerType: config.type
52+
providerType: config.type,
53+
maxOutputTokens: config.maxOutputTokens
5154
)
5255
}
5356
cache[config.id] = (apiKey, provider)

TablePro/Core/AI/AISchemaContext.swift

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,11 @@ struct AISchemaContext {
5353
let query = currentQuery,
5454
!query.isEmpty {
5555
let lang = editorLanguage.codeBlockTag
56-
parts.append("\n## Current Query\n```\(lang)\n\(query)\n```")
56+
let maxQueryLength = 2_000
57+
let truncated = query.count > maxQueryLength
58+
? String(query.prefix(maxQueryLength)) + "\n-- ... truncated"
59+
: query
60+
parts.append("\n## Current Query\n```\(lang)\n\(truncated)\n```")
5761
}
5862

5963
if settings.includeQueryResults,

TablePro/Core/AI/AnthropicProvider.swift

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,13 @@ final class AnthropicProvider: AIProvider {
1414

1515
private let endpoint: String
1616
private let apiKey: String
17+
private let maxOutputTokens: Int
1718
private let session: URLSession
1819

19-
init(endpoint: String, apiKey: String) {
20+
init(endpoint: String, apiKey: String, maxOutputTokens: Int = 4_096) {
2021
self.endpoint = endpoint.hasSuffix("/") ? String(endpoint.dropLast()) : endpoint
2122
self.apiKey = apiKey.trimmingCharacters(in: .whitespacesAndNewlines)
23+
self.maxOutputTokens = maxOutputTokens
2224
self.session = URLSession(configuration: .ephemeral)
2325
}
2426

@@ -182,9 +184,10 @@ final class AnthropicProvider: AIProvider {
182184
messages: [AIChatMessage],
183185
model: String,
184186
systemPrompt: String?,
185-
maxTokens: Int = 4_096,
187+
maxTokens: Int? = nil,
186188
stream: Bool = true
187189
) throws -> URLRequest {
190+
let maxTokens = maxTokens ?? maxOutputTokens
188191
guard let url = URL(string: "\(endpoint)/v1/messages") else {
189192
throw AIProviderError.invalidEndpoint(endpoint)
190193
}

TablePro/Core/AI/GeminiProvider.swift

Lines changed: 26 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,13 @@ final class GeminiProvider: AIProvider {
1414

1515
private let endpoint: String
1616
private let apiKey: String
17+
private let maxOutputTokens: Int
1718
private let session: URLSession
1819

19-
init(endpoint: String, apiKey: String) {
20+
init(endpoint: String, apiKey: String, maxOutputTokens: Int = 8_192) {
2021
self.endpoint = endpoint.hasSuffix("/") ? String(endpoint.dropLast()) : endpoint
2122
self.apiKey = apiKey.trimmingCharacters(in: .whitespacesAndNewlines)
23+
self.maxOutputTokens = maxOutputTokens
2224
self.session = URLSession(configuration: .ephemeral)
2325
}
2426

@@ -106,6 +108,14 @@ final class GeminiProvider: AIProvider {
106108
}
107109
}
108110

111+
private static let knownModels = [
112+
"gemini-2.5-flash",
113+
"gemini-2.5-pro",
114+
"gemini-2.0-flash",
115+
"gemini-1.5-flash",
116+
"gemini-1.5-pro"
117+
]
118+
109119
func fetchAvailableModels() async throws -> [String] {
110120
guard let url = URL(string: "\(endpoint)/v1beta/models") else {
111121
throw AIProviderError.invalidEndpoint(endpoint)
@@ -115,36 +125,41 @@ final class GeminiProvider: AIProvider {
115125
request.httpMethod = "GET"
116126
request.setValue(apiKey, forHTTPHeaderField: "x-goog-api-key")
117127

118-
let (data, response) = try await session.data(for: request)
128+
let data: Data
129+
let response: URLResponse
130+
do {
131+
(data, response) = try await session.data(for: request)
132+
} catch {
133+
return Self.knownModels
134+
}
119135

120136
guard let httpResponse = response as? HTTPURLResponse else {
121-
throw AIProviderError.networkError("Invalid response")
137+
return Self.knownModels
122138
}
123139

124140
guard httpResponse.statusCode == 200 else {
125-
throw mapHTTPError(
126-
statusCode: httpResponse.statusCode,
127-
body: String(data: data, encoding: .utf8) ?? ""
128-
)
141+
return Self.knownModels
129142
}
130143

131144
guard let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any],
132145
let models = json["models"] as? [[String: Any]]
133146
else {
134-
return []
147+
return Self.knownModels
135148
}
136149

137-
return models.compactMap { model -> String? in
150+
let fetched = models.compactMap { model -> String? in
138151
guard let name = model["name"] as? String,
139152
let methods = model["supportedGenerationMethods"] as? [String],
140153
methods.contains("generateContent")
141154
else { return nil }
142-
// Strip "models/" prefix: "models/gemini-2.0-flash" -> "gemini-2.0-flash"
155+
// Strip "models/" prefix: "models/gemini-2.0-flash" -> "gemini-2.0-flash"
143156
if name.hasPrefix("models/") {
144157
return String(name.dropFirst(7))
145158
}
146159
return name
147160
}
161+
162+
return fetched.isEmpty ? Self.knownModels : fetched
148163
}
149164

150165
func testConnection() async throws -> Bool {
@@ -196,7 +211,7 @@ final class GeminiProvider: AIProvider {
196211
request.setValue(apiKey, forHTTPHeaderField: "x-goog-api-key")
197212

198213
var body: [String: Any] = [
199-
"generationConfig": ["maxOutputTokens": 8_192]
214+
"generationConfig": ["maxOutputTokens": maxOutputTokens]
200215
]
201216

202217
if let systemPrompt, !systemPrompt.isEmpty {

TablePro/Core/AI/InlineSuggestionManager.swift

Lines changed: 27 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -234,22 +234,38 @@ final class InlineSuggestionManager {
234234
systemPrompt: systemPrompt
235235
)
236236

237+
let flushInterval: ContinuousClock.Duration = .milliseconds(50)
238+
var lastFlushTime: ContinuousClock.Instant = .now
239+
237240
for try await event in stream {
238241
guard !Task.isCancelled else { break }
239-
switch event {
240-
case .text(let token):
242+
if case .text(let token) = event {
241243
accumulated += token
242-
// Progressive update: show partial ghost text as tokens arrive
243-
await MainActor.run { [weak self, accumulated] in
244-
guard let self else { return }
245-
let cleaned = self.cleanSuggestion(accumulated)
246-
if !cleaned.isEmpty {
247-
self.currentSuggestion = cleaned
248-
self.showGhostText(cleaned, at: self.suggestionOffset)
244+
if ContinuousClock.now - lastFlushTime >= flushInterval {
245+
let snapshot = accumulated
246+
await MainActor.run { [weak self] in
247+
guard let self else { return }
248+
let cleaned = self.cleanSuggestion(snapshot)
249+
if !cleaned.isEmpty {
250+
self.currentSuggestion = cleaned
251+
self.showGhostText(cleaned, at: self.suggestionOffset)
252+
}
249253
}
254+
lastFlushTime = .now
255+
}
256+
}
257+
}
258+
259+
// Final flush
260+
if !Task.isCancelled, !accumulated.isEmpty {
261+
let snapshot = accumulated
262+
await MainActor.run { [weak self] in
263+
guard let self else { return }
264+
let cleaned = self.cleanSuggestion(snapshot)
265+
if !cleaned.isEmpty {
266+
self.currentSuggestion = cleaned
267+
self.showGhostText(cleaned, at: self.suggestionOffset)
250268
}
251-
case .usage:
252-
break
253269
}
254270
}
255271

TablePro/Core/AI/OpenAICompatibleProvider.swift

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,14 @@ final class OpenAICompatibleProvider: AIProvider {
1818
private let endpoint: String
1919
private let apiKey: String?
2020
private let providerType: AIProviderType
21+
private let maxOutputTokens: Int?
2122
private let session: URLSession
2223

23-
init(endpoint: String, apiKey: String?, providerType: AIProviderType) {
24+
init(endpoint: String, apiKey: String?, providerType: AIProviderType, maxOutputTokens: Int? = nil) {
2425
self.endpoint = endpoint.hasSuffix("/") ? String(endpoint.dropLast()) : endpoint
2526
self.apiKey = apiKey?.trimmingCharacters(in: .whitespacesAndNewlines)
2627
self.providerType = providerType
28+
self.maxOutputTokens = maxOutputTokens
2729
self.session = URLSession(configuration: .ephemeral)
2830
}
2931

@@ -254,6 +256,10 @@ final class OpenAICompatibleProvider: AIProvider {
254256
"stream": true
255257
]
256258

259+
if let maxOutputTokens {
260+
body["max_tokens"] = maxOutputTokens
261+
}
262+
257263
// Request usage stats in stream (OpenAI/OpenRouter support this)
258264
if providerType != .ollama {
259265
body["stream_options"] = ["include_usage": true]

TablePro/Core/Storage/AIChatStorage.swift

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,12 +59,25 @@ actor AIChatStorage {
5959

6060
// MARK: - Public Methods
6161

62+
/// Maximum encoded size for a single conversation file (500 KB)
63+
private static let maxFileSize = 500_000
64+
65+
/// Maximum number of messages to keep after trimming
66+
private static let trimmedMessageCount = 50
67+
6268
/// Save a conversation to disk
6369
func save(_ conversation: AIConversation) {
6470
let fileURL = directory.appendingPathComponent("\(conversation.id.uuidString).json")
6571

6672
do {
67-
let data = try Self.encoder.encode(conversation)
73+
var data = try Self.encoder.encode(conversation)
74+
75+
if data.count > Self.maxFileSize {
76+
var trimmed = conversation
77+
trimmed.messages = Array(trimmed.messages.suffix(Self.trimmedMessageCount))
78+
data = try Self.encoder.encode(trimmed)
79+
}
80+
6881
try data.write(to: fileURL, options: [.atomic, .completeFileProtectionUntilFirstUserAuthentication])
6982
} catch {
7083
Self.logger.error("Failed to save conversation \(conversation.id): \(error.localizedDescription)")

TablePro/Models/AI/AIModels.swift

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -60,21 +60,24 @@ struct AIProviderConfig: Codable, Equatable, Identifiable {
6060
var model: String
6161
var endpoint: String
6262
var isEnabled: Bool
63+
var maxOutputTokens: Int?
6364

6465
init(
6566
id: UUID = UUID(),
6667
name: String = "",
6768
type: AIProviderType = .claude,
6869
model: String = "",
6970
endpoint: String = "",
70-
isEnabled: Bool = true
71+
isEnabled: Bool = true,
72+
maxOutputTokens: Int? = nil
7173
) {
7274
self.id = id
7375
self.name = name
7476
self.type = type
7577
self.model = model
7678
self.endpoint = endpoint.isEmpty ? type.defaultEndpoint : endpoint
7779
self.isEnabled = isEnabled
80+
self.maxOutputTokens = maxOutputTokens
7881
}
7982
}
8083

0 commit comments

Comments
 (0)