LmStudioChatModelOptions
constructor(
    model: ModelName,
    stream: Boolean? = null,
    maxTokens: MaxTokens? = null,
    temperature: Temperature = Temperature.ONE,
    top_p: Double = 1.0,
    n: Int = 1,
    stop: List<String>? = null,
    presencePenalty: Double = 0.0,
    frequencyPenalty: Double = 0.0,
    logitBias: Map<TokenId, Double>? = null,
    user: User? = null,
    responseFormat: ResponseFormat? = null,
    toolChoice: Any? = null,
    parallelToolCalls: Boolean? = null
)
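A minimal usage sketch follows. It assumes ModelName, MaxTokens, and Temperature are simple value wrappers constructed from a String, an Int, and a Double respectively, and that the remaining parameters can be left at their defaults; the model identifier shown is a placeholder, and the actual factory functions or builder may differ in the library.

    // Sketch only: wrapper constructors and the model identifier are assumptions,
    // not confirmed API of the library.
    val options = LmStudioChatModelOptions(
        model = ModelName("qwen2.5-7b-instruct"), // placeholder model id, assumed String wrapper
        maxTokens = MaxTokens(512),               // assumed Int wrapper
        temperature = Temperature(0.2),           // assumed Double wrapper
        top_p = 0.9,
        stop = listOf("\n\n"),
        stream = false
    )

Parameters not supplied (n, presencePenalty, frequencyPenalty, logitBias, user, responseFormat, toolChoice, parallelToolCalls) fall back to the defaults shown in the constructor signature above.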