
Commit 770c155

feat(sdk): allow input text into the python sdk (#2738)
GitOrigin-RevId: 9c36a4ad2fa7bd831920145ecf1c29b16765cc11
1 parent: 3f69d0d

4 files changed (+263 −7 lines)

README.md

Lines changed: 31 additions & 0 deletions
@@ -266,6 +266,37 @@ print(result.response)
 
 </details>
 
+
+<details>
+<summary>Use LeMUR to with Input Text</summary>
+
+```python
+import assemblyai as aai
+
+transcriber = aai.Transcriber()
+config = aai.TranscriptionConfig(
+    speaker_labels=True,
+)
+transcript = transcriber.transcribe("https://example.org/customer.mp3", config=config)
+
+# Example converting speaker label utterances into LeMUR input text
+text = ""
+
+for utt in transcript.utterances:
+    text += f"Speaker {utt.speaker}:\n{utt.text}\n"
+
+result = aai.Lemur().task(
+    "You are a helpful coach. Provide an analysis of the transcript "
+    "and offer areas to improve with exact quotes. Include no preamble. "
+    "Start with an overall summary then get into the examples with feedback.",
+    input_text=text
+)
+
+print(result.response)
+```
+
+</details>
+
 <details>
 <summary>Delete data previously sent to LeMUR</summary>
 
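The example above builds the input text from speaker-labeled utterances and passes it to `task`; as the `assemblyai/lemur.py` diff below shows, the same `input_text` keyword is also threaded through `question`, `summarize`, and `action_items`. A minimal sketch of the same idea with `summarize` (the API key placeholder and the hard-coded `text` string are illustrative assumptions):

```python
import assemblyai as aai

aai.settings.api_key = "YOUR_API_KEY"  # assumption: replace with your key

# Stand-in for the speaker-labeled string built in the README example above.
text = "Speaker A: Thanks for calling.\nSpeaker B: Hi, I have a billing question.\n"

# With input_text there is no transcript source to pass.
summary = aai.Lemur().summarize(
    input_text=text,
    max_output_size=300,
)
print(summary.response)
```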

assemblyai/lemur.py

Lines changed: 22 additions & 2 deletions
@@ -14,7 +14,11 @@ def __init__(
     ) -> None:
         self._client = client
 
-        self._sources = [types.LemurSourceRequest.from_lemur_source(s) for s in sources]
+        self._sources = (
+            [types.LemurSourceRequest.from_lemur_source(s) for s in sources]
+            if sources is not None
+            else []
+        )
 
     def question(
         self,
@@ -24,6 +28,7 @@ def question(
         final_model: Optional[types.LemurModel],
         max_output_size: Optional[int],
         temperature: Optional[float],
+        input_text: Optional[str],
     ) -> types.LemurQuestionResponse:
         response = api.lemur_question(
             client=self._client.http_client,
@@ -34,6 +39,7 @@ def question(
                 final_model=final_model,
                 max_output_size=max_output_size,
                 temperature=temperature,
+                input_text=input_text,
             ),
             http_timeout=timeout,
         )
@@ -48,6 +54,7 @@ def summarize(
         max_output_size: Optional[int],
         timeout: Optional[float],
         temperature: Optional[float],
+        input_text: Optional[str],
     ) -> types.LemurSummaryResponse:
         response = api.lemur_summarize(
             client=self._client.http_client,
@@ -58,6 +65,7 @@ def summarize(
                 final_model=final_model,
                 max_output_size=max_output_size,
                 temperature=temperature,
+                input_text=input_text,
             ),
             http_timeout=timeout,
         )
@@ -72,6 +80,7 @@ def action_items(
         max_output_size: Optional[int],
         timeout: Optional[float],
         temperature: Optional[float],
+        input_text: Optional[str],
     ) -> types.LemurActionItemsResponse:
         response = api.lemur_action_items(
             client=self._client.http_client,
@@ -82,6 +91,7 @@ def action_items(
                 final_model=final_model,
                 max_output_size=max_output_size,
                 temperature=temperature,
+                input_text=input_text,
             ),
             http_timeout=timeout,
         )
@@ -95,6 +105,7 @@ def task(
         max_output_size: Optional[int],
         timeout: Optional[float],
         temperature: Optional[float],
+        input_text: Optional[str],
     ):
         response = api.lemur_task(
             client=self._client.http_client,
@@ -104,6 +115,7 @@ def task(
                 final_model=final_model,
                 max_output_size=max_output_size,
                 temperature=temperature,
+                input_text=input_text,
             ),
             http_timeout=timeout,
         )
@@ -121,7 +133,7 @@ class Lemur:
 
     def __init__(
         self,
-        sources: List[types.LemurSource],
+        sources: Optional[List[types.LemurSource]] = None,
         client: Optional[_client.Client] = None,
     ) -> None:
         """
@@ -147,6 +159,7 @@ def question(
         max_output_size: Optional[int] = None,
         timeout: Optional[float] = None,
         temperature: Optional[float] = None,
+        input_text: Optional[str] = None,
     ) -> types.LemurQuestionResponse:
         """
         Question & Answer allows you to ask free form questions about one or many transcripts.
@@ -178,6 +191,7 @@ def question(
             max_output_size=max_output_size,
             timeout=timeout,
             temperature=temperature,
+            input_text=input_text,
         )
 
     def summarize(
@@ -188,6 +202,7 @@ def summarize(
         max_output_size: Optional[int] = None,
         timeout: Optional[float] = None,
         temperature: Optional[float] = None,
+        input_text: Optional[str] = None,
     ) -> types.LemurSummaryResponse:
         """
         Summary allows you to distill a piece of audio into a few impactful sentences.
@@ -214,6 +229,7 @@ def summarize(
             max_output_size=max_output_size,
             timeout=timeout,
             temperature=temperature,
+            input_text=input_text,
         )
 
     def action_items(
@@ -224,6 +240,7 @@ def action_items(
         max_output_size: Optional[int] = None,
         timeout: Optional[float] = None,
         temperature: Optional[float] = None,
+        input_text: Optional[str] = None,
     ) -> types.LemurActionItemsResponse:
         """
         Action Items allows you to generate action items from one or many transcripts.
@@ -251,6 +268,7 @@ def action_items(
             max_output_size=max_output_size,
             timeout=timeout,
             temperature=temperature,
+            input_text=input_text,
         )
 
     def task(
@@ -260,6 +278,7 @@ def task(
         max_output_size: Optional[int] = None,
         timeout: Optional[float] = None,
         temperature: Optional[float] = None,
+        input_text: Optional[str] = None,
     ) -> types.LemurTaskResponse:
         """
         Task feature allows you to submit a custom prompt to the model.
@@ -282,6 +301,7 @@ def task(
             max_output_size=max_output_size,
             timeout=timeout,
             temperature=temperature,
+            input_text=input_text,
         )
 
     @classmethod
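Because `sources` now defaults to `None` in both `_LemurImpl` and `Lemur`, a client can be constructed with no transcripts at all and driven purely by `input_text`. A hedged sketch using `question`, assuming the `aai.LemurQuestion` type and the question/answer response shape used in the SDK's existing Q&A examples:

```python
import assemblyai as aai

aai.settings.api_key = "YOUR_API_KEY"  # assumption: replace with your key

text = (
    "Speaker A: The deadline moved to Friday.\n"
    "Speaker B: Understood, I'll update the plan.\n"
)

# `sources` is optional after this change, so no transcript is required.
lemur = aai.Lemur()

result = lemur.question(
    questions=[aai.LemurQuestion(question="What is the new deadline?")],
    input_text=text,
)

for qa in result.response:
    print(qa.question, "->", qa.answer)
```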

assemblyai/types.py

Lines changed: 1 addition & 0 deletions
@@ -1773,6 +1773,7 @@ class BaseLemurRequest(BaseModel):
     final_model: Optional[LemurModel]
     max_output_size: Optional[int]
     temperature: Optional[float]
+    input_text: Optional[str]
 
 
 class LemurTaskRequest(BaseLemurRequest):
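The `types.py` change is a single optional field on `BaseLemurRequest`, so every request model that inherits from it (including `LemurTaskRequest`) can carry the text. A standalone pydantic sketch of the pattern, using a hypothetical model that mirrors only the fields visible in this hunk rather than the SDK's real class:

```python
from typing import Optional

from pydantic import BaseModel


class LemurRequestSketch(BaseModel):
    # Hypothetical stand-in for BaseLemurRequest; defaults added so all fields stay optional.
    final_model: Optional[str] = None
    max_output_size: Optional[int] = None
    temperature: Optional[float] = None
    input_text: Optional[str] = None


req = LemurRequestSketch(input_text="Speaker A: Hello.\nSpeaker B: Hi there.")

# Unset optional fields can be dropped from the serialized payload.
# (.dict() is the pydantic v1 spelling; use model_dump() on pydantic v2.)
print(req.dict(exclude_none=True))
```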
