Skip to content

Commit d704f74

Browse files
committed
Add id and encrypted fields to Message.Reasoning and integrate across clients
1 parent 2ee71cb commit d704f74

File tree

6 files changed

+27
-29
lines changed
  • prompt
    • prompt-executor/prompt-executor-clients
      • prompt-executor-anthropic-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/anthropic
      • prompt-executor-bedrock-client/src/jvmMain/kotlin/ai/koog/prompt/executor/clients/bedrock/modelfamilies/anthropic
      • prompt-executor-google-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/google
      • prompt-executor-openai-client-base/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai/base
      • prompt-executor-openai-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai
    • prompt-model/src/commonMain/kotlin/ai/koog/prompt/message

6 files changed

+27
-29
lines changed

prompt/prompt-executor/prompt-executor-clients/prompt-executor-anthropic-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/anthropic/AnthropicLLMClient.kt

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -328,9 +328,9 @@ public open class AnthropicLLMClient(
328328
messages.add(
329329
AnthropicMessage.Assistant(
330330
content = listOf(
331-
message.original as? AnthropicContent.Thinking ?: AnthropicContent.Thinking(
332-
signature = Uuid.random().toString(),
333-
thinking = message.content // TODO: Use signature from original message if available
331+
AnthropicContent.Thinking(
332+
signature = message.encrypted!!,
333+
thinking = message.content
334334
)
335335
)
336336
)
@@ -507,7 +507,7 @@ public open class AnthropicLLMClient(
507507
}
508508

509509
is AnthropicContent.Thinking -> {
510-
Message.Reasoning(original = content, content = content.thinking, metaInfo = metaInfo)
510+
Message.Reasoning(encrypted = content.signature, content = content.thinking, metaInfo = metaInfo)
511511
}
512512

513513
is AnthropicContent.ToolUse -> {

prompt/prompt-executor/prompt-executor-clients/prompt-executor-bedrock-client/src/jvmMain/kotlin/ai/koog/prompt/executor/clients/bedrock/modelfamilies/anthropic/BedrockAnthropicClaudeSerialization.kt

Lines changed: 6 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,6 @@ import kotlinx.serialization.json.buildJsonArray
2222
import kotlinx.serialization.json.buildJsonObject
2323
import kotlinx.serialization.json.encodeToJsonElement
2424
import kotlinx.serialization.json.put
25-
import kotlin.uuid.ExperimentalUuidApi
26-
import kotlin.uuid.Uuid
2725

2826
internal object BedrockAnthropicClaudeSerialization {
2927

@@ -35,7 +33,6 @@ internal object BedrockAnthropicClaudeSerialization {
3533
explicitNulls = false
3634
}
3735

38-
@OptIn(ExperimentalUuidApi::class)
3936
private fun buildMessagesHistory(prompt: Prompt): MutableList<BedrockAnthropicInvokeModelMessage> {
4037
val messages = mutableListOf<BedrockAnthropicInvokeModelMessage>()
4138
prompt.messages.forEach { msg ->
@@ -68,11 +65,10 @@ internal object BedrockAnthropicClaudeSerialization {
6865
messages.add(
6966
BedrockAnthropicInvokeModelMessage.Assistant(
7067
content = listOf(
71-
msg.original as? BedrockAnthropicInvokeModelContent.Thinking
72-
?: BedrockAnthropicInvokeModelContent.Thinking(
73-
signature = Uuid.random().toString(),
74-
thinking = msg.content
75-
)
68+
BedrockAnthropicInvokeModelContent.Thinking(
69+
signature = msg.encrypted!!,
70+
thinking = msg.content
71+
)
7672
)
7773
)
7874
)
@@ -181,7 +177,6 @@ internal object BedrockAnthropicClaudeSerialization {
181177
)
182178
}
183179

184-
@OptIn(ExperimentalUuidApi::class)
185180
internal fun parseAnthropicResponse(responseBody: String, clock: Clock = Clock.System): List<Message.Response> {
186181
val response = json.decodeFromString<BedrockAnthropicResponse>(responseBody)
187182

@@ -204,7 +199,7 @@ internal object BedrockAnthropicClaudeSerialization {
204199
)
205200

206201
is AnthropicContent.Thinking -> Message.Reasoning(
207-
original = content,
202+
encrypted = content.signature,
208203
content = content.thinking,
209204
metaInfo = metaInfo
210205
)
@@ -255,6 +250,7 @@ internal object BedrockAnthropicClaudeSerialization {
255250
name = content.name,
256251
content = content.input.toString()
257252
)
253+
258254
else -> throw IllegalArgumentException(
259255
"Unsupported AnthropicContent type in message_delta. Content: $content"
260256
)

prompt/prompt-executor/prompt-executor-clients/prompt-executor-google-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/google/GoogleLLMClient.kt

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -333,9 +333,10 @@ public open class GoogleLLMClient(
333333
GoogleContent(
334334
role = "assistant",
335335
parts = listOf(
336-
message.original as? GooglePart ?: GooglePart.Text(
336+
GooglePart.Text(
337337
text = message.content,
338-
thought = true
338+
thoughtSignature = message.encrypted,
339+
thought = true,
339340
)
340341
)
341342
)
@@ -606,7 +607,7 @@ public open class GoogleLLMClient(
606607
is GooglePart.Text -> {
607608
if (part.thought ?: false) {
608609
Message.Reasoning(
609-
original = part,
610+
encrypted = part.thoughtSignature,
610611
content = part.text,
611612
metaInfo = metaInfo
612613
)

prompt/prompt-executor/prompt-executor-clients/prompt-executor-openai-client-base/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai/base/AbstractOpenAILLMClient.kt

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -283,7 +283,7 @@ public abstract class AbstractOpenAILLMClient<TResponse : OpenAIBaseLLMResponse,
283283

284284
is Message.Reasoning -> {
285285
flushPendingCalls()
286-
messages += message.original as? OpenAIMessage.Assistant ?: OpenAIMessage.Assistant(
286+
messages += OpenAIMessage.Assistant(
287287
content = Content.Text(message.content),
288288
reasoningContent = message.content
289289
)
@@ -471,7 +471,6 @@ public abstract class AbstractOpenAILLMClient<TResponse : OpenAIBaseLLMResponse,
471471

472472
this is OpenAIMessage.Assistant && this.reasoningContent != null && this.content != null -> listOf(
473473
Message.Reasoning(
474-
original = this,
475474
content = this.reasoningContent,
476475
metaInfo = metaInfo
477476
),

prompt/prompt-executor/prompt-executor-clients/prompt-executor-openai-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai/OpenAILLMClient.kt

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -602,9 +602,10 @@ public open class OpenAILLMClient(
602602
is Message.Reasoning -> {
603603
flushPendingCalls()
604604
add(
605-
message.original as? Item.Reasoning ?: Item.Reasoning(
606-
Uuid.random().toString(),
607-
listOf(Item.Reasoning.Summary(message.content))
605+
Item.Reasoning(
606+
id = message.id ?: Uuid.random().toString(),
607+
encryptedContent = message.encrypted,
608+
summary = listOf(Item.Reasoning.Summary(message.content))
608609
)
609610
)
610611
}
@@ -709,7 +710,8 @@ public open class OpenAILLMClient(
709710
)
710711

711712
is Item.Reasoning -> Message.Reasoning(
712-
original = output,
713+
id = output.id,
714+
encrypted = output.encryptedContent,
713715
content = output.summary.joinToString(separator = "\n") { it.text },
714716
metaInfo = metaInfo
715717
)
@@ -735,13 +737,15 @@ public open class OpenAILLMClient(
735737
)
736738
params
737739
}
740+
738741
params is OpenAIChatParams -> {
739742
model.requireCapability(
740743
LLMCapability.OpenAIEndpoint.Completions,
741744
message = "Must be supported to use OpenAI chat params."
742745
)
743746
params
744747
}
748+
745749
model.supports(LLMCapability.OpenAIEndpoint.Completions) -> params.toOpenAIChatParams()
746750
model.supports(LLMCapability.OpenAIEndpoint.Responses) -> params.toOpenAIResponsesParams()
747751
else -> error("Cannot determine proper LLM params for OpenAI model: ${model.id}")

prompt/prompt-model/src/commonMain/kotlin/ai/koog/prompt/message/Message.kt

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@ package ai.koog.prompt.message
33
import kotlinx.datetime.Clock
44
import kotlinx.datetime.Instant
55
import kotlinx.serialization.Serializable
6-
import kotlinx.serialization.Transient
76
import kotlinx.serialization.json.Json
87
import kotlinx.serialization.json.JsonObject
98
import kotlinx.serialization.json.jsonObject
@@ -143,9 +142,8 @@ public sealed interface Message {
143142
* Represents a reasoning message exchanged in a chat system, encapsulating the content,
144143
* role, and associated metadata, along with an optional identifier and encrypted reasoning payload.
145144
*
146-
* @property original An optional reference to the original [Thinking] process that generated this reasoning,
147-
* providing context or tracing capabilities during operations. This property is transient and
148-
* will not be serialized.
145+
* @property id An optional identifier for the reasoning process.
146+
* @property encrypted The provider-issued encrypted (or signed) reasoning payload, preserved so the
* reasoning can be round-tripped back to the model on subsequent requests.
149147
* @property content The content of the message as a string.
150148
* @property role The [Role] of the message, indicating its source or function in the chat (e.g., assistant, user).
151149
* Defaults to [Role.Assistant].
@@ -154,8 +152,8 @@ public sealed interface Message {
154152
*/
155153
@Serializable
156154
public data class Reasoning(
157-
@Transient
158-
public val original: Thinking? = null,
155+
public val id: String? = null,
156+
public val encrypted: String? = null,
159157
override val content: String,
160158
override val role: Role = Role.Assistant,
161159
override val metaInfo: ResponseMetaInfo

0 commit comments

Comments
 (0)