import pytest
from ldclient import Config, Context, LDClient
from ldclient.integrations.test_data import TestData
- from ldclient.testing.builders import *
- from ldai.client import AIConfig, AIConfigData, LDAIClient, LDMessage
+ from ldai.client import AIConfig, LDAIClient, LDMessage, ModelConfig
from ldai.tracker import LDAIConfigTracker
@@ -14,7 +13,7 @@ def td() -> TestData:
        td.flag('model-config')
        .variations(
            {
-                 'model': {'modelId': 'fakeModel'},
+                 'model': {'modelId': 'fakeModel', 'temperature': 0.5, 'maxTokens': 4096},
                'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
                '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
            },
@@ -27,7 +26,7 @@ def td() -> TestData:
        td.flag('multiple-prompt')
        .variations(
            {
-                 'model': {'modelId': 'fakeModel'},
+                 'model': {'modelId': 'fakeModel', 'temperature': 0.7, 'maxTokens': 8192},
                'prompt': [
                    {'role': 'system', 'content': 'Hello, {{name}}!'},
                    {'role': 'user', 'content': 'The day is, {{day}}!'},
@@ -43,7 +42,7 @@ def td() -> TestData:
        td.flag('ctx-interpolation')
        .variations(
            {
-                 'model': {'modelId': 'fakeModel'},
+                 'model': {'modelId': 'fakeModel', 'extra-attribute': 'I can be anything I set my mind/type to'},
                'prompt': [{'role': 'system', 'content': 'Hello, {{ldctx.name}}!'}],
                '_ldMeta': {'enabled': True, 'versionKey': 'abcd'},
            }
@@ -55,7 +54,7 @@ def td() -> TestData:
        td.flag('off-config')
        .variations(
            {
-                 'model': {'modelId': 'fakeModel'},
+                 'model': {'modelId': 'fakeModel', 'temperature': 0.1},
                'prompt': [{'role': 'system', 'content': 'Hello, {{name}}!'}],
                '_ldMeta': {'enabled': False, 'versionKey': 'abcd'},
            }
@@ -82,81 +81,110 @@ def ldai_client(client: LDClient) -> LDAIClient:
    return LDAIClient(client)


+ def test_model_config_delegates_to_properties():
+     model = ModelConfig('fakeModel', temperature=0.5, max_tokens=4096, attributes={'extra-attribute': 'value'})
+     assert model.id == 'fakeModel'
+     assert model.temperature == 0.5
+     assert model.max_tokens == 4096
+     assert model.get_attribute('extra-attribute') == 'value'
+     assert model.get_attribute('non-existent') is None
+
+     assert model.id == model.get_attribute('id')
+     assert model.temperature == model.get_attribute('temperature')
+     assert model.max_tokens == model.get_attribute('maxTokens')
+     assert model.max_tokens != model.get_attribute('max_tokens')
+
+
def test_model_config_interpolation(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
    default_value = AIConfig(
-         config=AIConfigData(
-             model={'modelId': 'fakeModel'},
-             prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
-         ),
        tracker=tracker,
        enabled=True,
+         model=ModelConfig('fakeModel'),
+         prompt=[LDMessage(role='system', content='Hello, {{name}}!')],
    )
    variables = {'name': 'World'}

    config = ldai_client.model_config('model-config', context, default_value, variables)

-     assert config.config.prompt is not None
-     assert len(config.config.prompt) > 0
-     assert config.config.prompt[0].content == 'Hello, World!'
+     assert config.prompt is not None
+     assert len(config.prompt) > 0
+     assert config.prompt[0].content == 'Hello, World!'
    assert config.enabled is True

+     assert config.model is not None
+     assert config.model.id == 'fakeModel'
+     assert config.model.temperature == 0.5
+     assert config.model.max_tokens == 4096
+

def test_model_config_no_variables(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
-     default_value = AIConfig(
-         config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True
-     )
+     default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])

    config = ldai_client.model_config('model-config', context, default_value, {})

-     assert config.config.prompt is not None
-     assert len(config.config.prompt) > 0
-     assert config.config.prompt[0].content == 'Hello, !'
+     assert config.prompt is not None
+     assert len(config.prompt) > 0
+     assert config.prompt[0].content == 'Hello, !'
    assert config.enabled is True

+     assert config.model is not None
+     assert config.model.id == 'fakeModel'
+     assert config.model.temperature == 0.5
+     assert config.model.max_tokens == 4096
+

def test_context_interpolation(ldai_client: LDAIClient, tracker):
    context = Context.builder('user-key').name("Sandy").build()
-     default_value = AIConfig(
-         config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True
-     )
+     default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])
    variables = {'name': 'World'}

    config = ldai_client.model_config(
        'ctx-interpolation', context, default_value, variables
    )

-     assert config.config.prompt is not None
-     assert len(config.config.prompt) > 0
-     assert config.config.prompt[0].content == 'Hello, Sandy!'
+     assert config.prompt is not None
+     assert len(config.prompt) > 0
+     assert config.prompt[0].content == 'Hello, Sandy!'
    assert config.enabled is True

+     assert config.model is not None
+     assert config.model.id == 'fakeModel'
+     assert config.model.temperature is None
+     assert config.model.max_tokens is None
+     assert config.model.get_attribute('extra-attribute') == 'I can be anything I set my mind/type to'
+

def test_model_config_multiple(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
-     default_value = AIConfig(
-         config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=True
-     )
+     default_value = AIConfig(tracker=tracker, enabled=True, model=ModelConfig('fake-model'), prompt=[])
    variables = {'name': 'World', 'day': 'Monday'}

    config = ldai_client.model_config(
        'multiple-prompt', context, default_value, variables
    )

-     assert config.config.prompt is not None
-     assert len(config.config.prompt) > 0
-     assert config.config.prompt[0].content == 'Hello, World!'
-     assert config.config.prompt[1].content == 'The day is, Monday!'
+     assert config.prompt is not None
+     assert len(config.prompt) > 0
+     assert config.prompt[0].content == 'Hello, World!'
+     assert config.prompt[1].content == 'The day is, Monday!'
    assert config.enabled is True

+     assert config.model is not None
+     assert config.model.id == 'fakeModel'
+     assert config.model.temperature == 0.7
+     assert config.model.max_tokens == 8192
+

def test_model_config_disabled(ldai_client: LDAIClient, tracker):
    context = Context.create('user-key')
-     default_value = AIConfig(
-         config=AIConfigData(model={}, prompt=[]), tracker=tracker, enabled=False
-     )
+     default_value = AIConfig(tracker=tracker, enabled=False, model=ModelConfig('fake-model'), prompt=[])

    config = ldai_client.model_config('off-config', context, default_value, {})

+     assert config.model is not None
    assert config.enabled is False
+     assert config.model.id == 'fakeModel'
+     assert config.model.temperature == 0.1
+     assert config.model.max_tokens is None
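
Note for reviewers: a minimal sketch of the `ModelConfig` attribute behavior exercised by the new `test_model_config_delegates_to_properties` above. It is grounded only in those assertions (and the `maxTokens` key used in the flag variations); the specific values are the same placeholders the tests use, and anything not asserted by the tests is deliberately left out.

```python
from ldai.client import ModelConfig

# Construct a model config the same way the new test does.
model = ModelConfig(
    'fakeModel',                                # exposed as model.id
    temperature=0.5,                            # exposed as model.temperature
    max_tokens=4096,                            # exposed as model.max_tokens
    attributes={'extra-attribute': 'value'},    # arbitrary extra attributes
)

# Well-known fields are available as properties...
assert model.id == 'fakeModel'
assert model.temperature == 0.5
assert model.max_tokens == 4096

# ...and are also reachable via get_attribute(), alongside the extras.
assert model.get_attribute('extra-attribute') == 'value'
assert model.get_attribute('maxTokens') == 4096              # camelCase key, matching the flag JSON
assert model.get_attribute('max_tokens') != model.max_tokens  # snake_case key is not mirrored
assert model.get_attribute('non-existent') is None
```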