From 946ca32aaca44edc0e96ab88a062341ed938dd5c Mon Sep 17 00:00:00 2001 From: John Vajda Date: Thu, 6 Feb 2025 17:14:04 -0700 Subject: [PATCH 1/3] nova-3 and keyterms added --- README.md | 4 +- .../clients/agent/v1/websocket/options.py | 4 +- deepgram/clients/listen/v1/rest/options.py | 5 +- .../clients/listen/v1/websocket/options.py | 5 +- .../advanced/rest/direct_invocation/main.py | 2 +- .../websocket/direct_invocation/main.py | 2 +- .../websocket/microphone_inheritance/main.py | 2 +- .../websocket/mute-microphone/main.py | 2 +- examples/analyze/intent/conversation.txt | 10 +- .../legacy_dict_intent/conversation.txt | 10 +- examples/analyze/sentiment/conversation.txt | 10 +- .../analyze/stream_intent/conversation.txt | 10 +- examples/analyze/summary/conversation.txt | 10 +- examples/analyze/topic/conversation.txt | 10 +- .../speech-to-text/rest/async_file/main.py | 2 +- .../speech-to-text/rest/async_url/main.py | 2 +- .../rest/callback/callback/main.py | 2 +- examples/speech-to-text/rest/file/main.py | 2 +- examples/speech-to-text/rest/intent/main.py | 2 +- .../rest/legacy_dict_url/main.py | 2 +- .../speech-to-text/rest/sentiment/main.py | 2 +- .../speech-to-text/rest/stream_file/main.py | 2 +- examples/speech-to-text/rest/summary/main.py | 2 +- examples/speech-to-text/rest/topic/main.py | 2 +- examples/speech-to-text/rest/url/main.py | 2 +- .../websocket/async_http/main.py | 2 +- .../websocket/async_microphone/main.py | 2 +- .../speech-to-text/websocket/http/main.py | 2 +- .../websocket/legacy_dict_microphone/main.py | 2 +- .../websocket/microphone/main.py | 2 +- .../speech-to-text/websocket/replay/main.py | 2 +- tests/daily_test/conversation.txt | 10 +- .../test_daily_async_listen_rest_file.py | 4 +- .../test_daily_async_listen_rest_url.py | 4 +- .../daily_test/test_daily_async_speak_rest.py | 2 +- .../daily_test/test_daily_listen_rest_file.py | 4 +- .../daily_test/test_daily_listen_rest_url.py | 4 +- tests/daily_test/test_daily_speak_rest.py | 2 +- 
.../auto_flush/async_microphone_mute/main.py | 2 +- .../auto_flush/microphone_mute/main.py | 2 +- .../reconnect_same_object/async/main.py | 2 +- .../reconnect_same_object/sync/main.py | 2 +- tests/edge_cases/usage_to_fast/main.py | 2 +- ...1219f4780ca70930b0a370ed2163a-options.json | 5 +- ...219f4780ca70930b0a370ed2163a-response.json | 263 ++++++++++- ...3fe1052ff1c7b090f7eaf8ede5b76-options.json | 5 +- ...fe1052ff1c7b090f7eaf8ede5b76-response.json | 416 ++++++++++++++++- ...1219f4780ca70930b0a370ed2163a-options.json | 6 +- ...219f4780ca70930b0a370ed2163a-response.json | 272 ++++++++++- ...3fe1052ff1c7b090f7eaf8ede5b76-options.json | 6 +- ...fe1052ff1c7b090f7eaf8ede5b76-response.json | 425 +++++++++++++++++- ...5f6f5187cd93d944cc94fa81c8469-options.json | 10 +- ...f6f5187cd93d944cc94fa81c8469-response.json | 74 ++- ...985c66ab177e9446fd14bbafd70df-options.json | 10 +- ...85c66ab177e9446fd14bbafd70df-response.json | 60 ++- tests/unit_test/conversation.txt | 10 +- .../test_unit_async_listen_rest_file.py | 6 +- .../test_unit_async_listen_rest_url.py | 6 +- .../test_unit_async_listen_websocket.py | 4 +- tests/unit_test/test_unit_listen_rest_file.py | 6 +- tests/unit_test/test_unit_listen_rest_url.py | 6 +- tests/unit_test/test_unit_listen_websocket.py | 4 +- 62 files changed, 1645 insertions(+), 109 deletions(-) diff --git a/README.md b/README.md index 6e8cf89e..901b00b0 100644 --- a/README.md +++ b/README.md @@ -85,7 +85,7 @@ deepgram: DeepgramClient = DeepgramClient("", ClientOptionsFromEnv()) ## STEP 2 Call the transcribe_url method on the prerecorded class options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, ) response = deepgram.listen.rest.v("1").transcribe_url(AUDIO_URL, options) @@ -134,7 +134,7 @@ dg_connection.on(LiveTranscriptionEvents.Error, on_error) dg_connection.on(LiveTranscriptionEvents.Close, on_close) options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", punctuate=True, 
language="en-US", encoding="linear16", diff --git a/deepgram/clients/agent/v1/websocket/options.py b/deepgram/clients/agent/v1/websocket/options.py index 6d843052..553994a0 100644 --- a/deepgram/clients/agent/v1/websocket/options.py +++ b/deepgram/clients/agent/v1/websocket/options.py @@ -23,7 +23,9 @@ class Listen(BaseResponse): This class defines any configuration settings for the Listen model. """ - model: Optional[str] = field(default="nova-2") + model: Optional[str] = field( + default=None, metadata=dataclass_config(exclude=lambda f: f is None) + ) @dataclass diff --git a/deepgram/clients/listen/v1/rest/options.py b/deepgram/clients/listen/v1/rest/options.py index 93d3871a..1c44d54e 100644 --- a/deepgram/clients/listen/v1/rest/options.py +++ b/deepgram/clients/listen/v1/rest/options.py @@ -82,6 +82,9 @@ class PrerecordedOptions(BaseResponse): # pylint: disable=too-many-instance-att intents: Optional[bool] = field( default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) + keyterm: Optional[List[str]] = field( + default=None, metadata=dataclass_config(exclude=lambda f: f is None) + ) keywords: Optional[Union[List[str], str]] = field( default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) @@ -92,7 +95,7 @@ class PrerecordedOptions(BaseResponse): # pylint: disable=too-many-instance-att default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) model: Optional[str] = field( - default="nova-2", metadata=dataclass_config(exclude=lambda f: f is None) + default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) multichannel: Optional[bool] = field( default=None, metadata=dataclass_config(exclude=lambda f: f is None) diff --git a/deepgram/clients/listen/v1/websocket/options.py b/deepgram/clients/listen/v1/websocket/options.py index dd2e775b..97b14105 100644 --- a/deepgram/clients/listen/v1/websocket/options.py +++ b/deepgram/clients/listen/v1/websocket/options.py @@ -68,11 +68,14 @@ class 
LiveOptions(BaseResponse): # pylint: disable=too-many-instance-attributes keywords: Optional[Union[List[str], str]] = field( default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) + keyterm: Optional[List[str]] = field( + default=None, metadata=dataclass_config(exclude=lambda f: f is None) + ) language: Optional[str] = field( default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) model: Optional[str] = field( - default="nova-2", metadata=dataclass_config(exclude=lambda f: f is None) + default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) multichannel: Optional[bool] = field( default=None, metadata=dataclass_config(exclude=lambda f: f is None) ) diff --git a/examples/advanced/rest/direct_invocation/main.py b/examples/advanced/rest/direct_invocation/main.py index 452de4cb..cb33e3e6 100644 --- a/examples/advanced/rest/direct_invocation/main.py +++ b/examples/advanced/rest/direct_invocation/main.py @@ -29,7 +29,7 @@ def main(): # STEP 2 Call the transcribe_url method on the prerecorded class options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, summarize="v2", ) diff --git a/examples/advanced/websocket/direct_invocation/main.py b/examples/advanced/websocket/direct_invocation/main.py index 47c6f2a8..186de646 100644 --- a/examples/advanced/websocket/direct_invocation/main.py +++ b/examples/advanced/websocket/direct_invocation/main.py @@ -58,7 +58,7 @@ def on_error(self, error, **kwargs): liveClient.on(LiveTranscriptionEvents.Error, on_error) # connect to websocket - options: LiveOptions = LiveOptions(model="nova-2", language="en-US") + options: LiveOptions = LiveOptions(model="nova-3", language="en-US") if liveClient.start(options) is False: print("Failed to connect to Deepgram") diff --git a/examples/advanced/websocket/microphone_inheritance/main.py b/examples/advanced/websocket/microphone_inheritance/main.py index fedfcfbd..ac99f382 100644 --- 
a/examples/advanced/websocket/microphone_inheritance/main.py +++ b/examples/advanced/websocket/microphone_inheritance/main.py @@ -79,7 +79,7 @@ def main(): liveClient: MyLiveClient = MyLiveClient(ClientOptionsFromEnv()) options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", punctuate=True, language="en-US", encoding="linear16", diff --git a/examples/advanced/websocket/mute-microphone/main.py b/examples/advanced/websocket/mute-microphone/main.py index 86e9a2b6..7b58ecdb 100644 --- a/examples/advanced/websocket/mute-microphone/main.py +++ b/examples/advanced/websocket/mute-microphone/main.py @@ -66,7 +66,7 @@ def on_error(self, error, **kwargs): dg_connection.on(LiveTranscriptionEvents.Error, on_error) options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", punctuate=True, language="en-US", encoding="linear16", diff --git a/examples/analyze/intent/conversation.txt b/examples/analyze/intent/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/examples/analyze/intent/conversation.txt +++ b/examples/analyze/intent/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. 
+Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. 
-And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. 
And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." - Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. -We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. 
\ No newline at end of file diff --git a/examples/analyze/legacy_dict_intent/conversation.txt b/examples/analyze/legacy_dict_intent/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/examples/analyze/legacy_dict_intent/conversation.txt +++ b/examples/analyze/legacy_dict_intent/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. +Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. 
Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. -And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. 
We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." - Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. 
We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. -We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file diff --git a/examples/analyze/sentiment/conversation.txt b/examples/analyze/sentiment/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/examples/analyze/sentiment/conversation.txt +++ b/examples/analyze/sentiment/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. 
+Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. 
-And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. 
And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." - Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. -We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. 
\ No newline at end of file diff --git a/examples/analyze/stream_intent/conversation.txt b/examples/analyze/stream_intent/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/examples/analyze/stream_intent/conversation.txt +++ b/examples/analyze/stream_intent/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. +Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. 
Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. -And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. 
We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." - Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. 
We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. -We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file diff --git a/examples/analyze/summary/conversation.txt b/examples/analyze/summary/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/examples/analyze/summary/conversation.txt +++ b/examples/analyze/summary/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. 
+Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. 
-And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. 
And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." - Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. -We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. 
\ No newline at end of file diff --git a/examples/analyze/topic/conversation.txt b/examples/analyze/topic/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/examples/analyze/topic/conversation.txt +++ b/examples/analyze/topic/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. +Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. 
Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. -And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. 
These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." - Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. 
-We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file diff --git a/examples/speech-to-text/rest/async_file/main.py b/examples/speech-to-text/rest/async_file/main.py index ae1ea62e..adaa5b74 100644 --- a/examples/speech-to-text/rest/async_file/main.py +++ b/examples/speech-to-text/rest/async_file/main.py @@ -41,7 +41,7 @@ async def main(): } options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, utterances=True, punctuate=True, diff --git a/examples/speech-to-text/rest/async_url/main.py b/examples/speech-to-text/rest/async_url/main.py index faae2756..55135105 100644 --- a/examples/speech-to-text/rest/async_url/main.py +++ b/examples/speech-to-text/rest/async_url/main.py @@ -16,7 +16,7 @@ } options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, summarize="v2", ) diff --git a/examples/speech-to-text/rest/callback/callback/main.py b/examples/speech-to-text/rest/callback/callback/main.py index 0a41fe35..d0859bd6 100644 --- a/examples/speech-to-text/rest/callback/callback/main.py +++ b/examples/speech-to-text/rest/callback/callback/main.py @@ -46,7 +46,7 @@ def main(): # } options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, utterances=True, ) diff --git a/examples/speech-to-text/rest/file/main.py b/examples/speech-to-text/rest/file/main.py index 5cc1080c..07c2d1fa 100644 --- a/examples/speech-to-text/rest/file/main.py 
+++ b/examples/speech-to-text/rest/file/main.py @@ -40,7 +40,7 @@ def main(): } options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, utterances=True, punctuate=True, diff --git a/examples/speech-to-text/rest/intent/main.py b/examples/speech-to-text/rest/intent/main.py index df80e7b0..d95f4aed 100644 --- a/examples/speech-to-text/rest/intent/main.py +++ b/examples/speech-to-text/rest/intent/main.py @@ -39,7 +39,7 @@ def main(): } options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, utterances=True, punctuate=True, diff --git a/examples/speech-to-text/rest/legacy_dict_url/main.py b/examples/speech-to-text/rest/legacy_dict_url/main.py index 858fb947..b7ea770c 100644 --- a/examples/speech-to-text/rest/legacy_dict_url/main.py +++ b/examples/speech-to-text/rest/legacy_dict_url/main.py @@ -27,7 +27,7 @@ def main(): # STEP 2 Call the transcribe_url method on the rest class options = { - "mode": "nova-2", + "model": "nova-3", "smart_format": True, } response = deepgram.listen.rest.v("1").transcribe_url(AUDIO_URL, options) diff --git a/examples/speech-to-text/rest/sentiment/main.py b/examples/speech-to-text/rest/sentiment/main.py index feac70ee..c94dd1ba 100644 --- a/examples/speech-to-text/rest/sentiment/main.py +++ b/examples/speech-to-text/rest/sentiment/main.py @@ -39,7 +39,7 @@ def main(): } options = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, utterances=True, punctuate=True, diff --git a/examples/speech-to-text/rest/stream_file/main.py b/examples/speech-to-text/rest/stream_file/main.py index 434a8277..beace65a 100644 --- a/examples/speech-to-text/rest/stream_file/main.py +++ b/examples/speech-to-text/rest/stream_file/main.py @@ -39,7 +39,7 @@ def main(): "stream": stream, } options = PrerecordedOptions( - model="nova-2", + model="nova-3", ) response = deepgram.listen.rest.v("1").transcribe_file(payload, options)
print(response.to_json(indent=4)) diff --git a/examples/speech-to-text/rest/summary/main.py b/examples/speech-to-text/rest/summary/main.py index e5d05af1..b65b8f50 100644 --- a/examples/speech-to-text/rest/summary/main.py +++ b/examples/speech-to-text/rest/summary/main.py @@ -39,7 +39,7 @@ def main(): } options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, utterances=True, punctuate=True, diff --git a/examples/speech-to-text/rest/topic/main.py b/examples/speech-to-text/rest/topic/main.py index 68ad728d..dbc49c95 100644 --- a/examples/speech-to-text/rest/topic/main.py +++ b/examples/speech-to-text/rest/topic/main.py @@ -39,7 +39,7 @@ def main(): } options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, utterances=True, punctuate=True, diff --git a/examples/speech-to-text/rest/url/main.py b/examples/speech-to-text/rest/url/main.py index 7831bf94..e28b5594 100644 --- a/examples/speech-to-text/rest/url/main.py +++ b/examples/speech-to-text/rest/url/main.py @@ -27,7 +27,7 @@ def main(): # STEP 2 Call the transcribe_url method on the rest class options: PrerecordedOptions = PrerecordedOptions( - model="nova-2", + model="nova-3", smart_format=True, ) response = deepgram.listen.rest.v("1").transcribe_url(AUDIO_URL, options) diff --git a/examples/speech-to-text/websocket/async_http/main.py b/examples/speech-to-text/websocket/async_http/main.py index cd5a20ec..ad403248 100644 --- a/examples/speech-to-text/websocket/async_http/main.py +++ b/examples/speech-to-text/websocket/async_http/main.py @@ -84,7 +84,7 @@ async def on_unhandled(self, unhandled, **kwargs): # connect to websocket options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", language="en-US", ) diff --git a/examples/speech-to-text/websocket/async_microphone/main.py b/examples/speech-to-text/websocket/async_microphone/main.py index 85e3b17c..bf52ddbf 100644 --- 
a/examples/speech-to-text/websocket/async_microphone/main.py +++ b/examples/speech-to-text/websocket/async_microphone/main.py @@ -105,7 +105,7 @@ async def on_unhandled(self, unhandled, **kwargs): # connect to websocket options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", language="en-US", # Apply smart formatting to the output smart_format=True, diff --git a/examples/speech-to-text/websocket/http/main.py b/examples/speech-to-text/websocket/http/main.py index f26cec72..40848650 100644 --- a/examples/speech-to-text/websocket/http/main.py +++ b/examples/speech-to-text/websocket/http/main.py @@ -69,7 +69,7 @@ def on_unhandled(self, unhandled, **kwargs): dg_connection.on(LiveTranscriptionEvents.Unhandled, on_unhandled) # connect to websocket - options = LiveOptions(model="nova-2", language="en-US") + options = LiveOptions(model="nova-3", language="en-US") print("\n\nPress Enter to stop recording...\n\n") if dg_connection.start(options) is False: diff --git a/examples/speech-to-text/websocket/legacy_dict_microphone/main.py b/examples/speech-to-text/websocket/legacy_dict_microphone/main.py index e1023243..afed691d 100644 --- a/examples/speech-to-text/websocket/legacy_dict_microphone/main.py +++ b/examples/speech-to-text/websocket/legacy_dict_microphone/main.py @@ -68,7 +68,7 @@ def on_unhandled(self, unhandled, **kwargs): dg_connection.on(LiveTranscriptionEvents.Unhandled, on_unhandled) options = { - "model": "nova-2", + "model": "nova-3", "punctuate": True, "language": "en-US", "encoding": "linear16", diff --git a/examples/speech-to-text/websocket/microphone/main.py b/examples/speech-to-text/websocket/microphone/main.py index fdc3df89..516d2213 100644 --- a/examples/speech-to-text/websocket/microphone/main.py +++ b/examples/speech-to-text/websocket/microphone/main.py @@ -94,7 +94,7 @@ def on_unhandled(self, unhandled, **kwargs): dg_connection.on(LiveTranscriptionEvents.Unhandled, on_unhandled) options: LiveOptions = LiveOptions( - model="nova-2", + 
model="nova-3", language="en-US", # Apply smart formatting to the output smart_format=True, diff --git a/examples/speech-to-text/websocket/replay/main.py b/examples/speech-to-text/websocket/replay/main.py index f83416fe..b223042e 100644 --- a/examples/speech-to-text/websocket/replay/main.py +++ b/examples/speech-to-text/websocket/replay/main.py @@ -67,7 +67,7 @@ def on_error(self, error, **kwargs): # connect to websocket options = LiveOptions( - model="nova-2", + model="nova-3", language="en-US", encoding="linear16", sample_rate=22050, diff --git a/tests/daily_test/conversation.txt b/tests/daily_test/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/tests/daily_test/conversation.txt +++ b/tests/daily_test/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. +Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. 
With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. -And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. 
With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." 
- Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. -We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. 
\ No newline at end of file diff --git a/tests/daily_test/test_daily_async_listen_rest_file.py b/tests/daily_test/test_daily_async_listen_rest_file.py index 6386347e..07f88499 100644 --- a/tests/daily_test/test_daily_async_listen_rest_file.py +++ b/tests/daily_test/test_daily_async_listen_rest_file.py @@ -23,12 +23,12 @@ input_output = [ ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT]}, ), ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT], "results.summary.short": [ diff --git a/tests/daily_test/test_daily_async_listen_rest_url.py b/tests/daily_test/test_daily_async_listen_rest_url.py index a3765259..54a3abf4 100644 --- a/tests/daily_test/test_daily_async_listen_rest_url.py +++ b/tests/daily_test/test_daily_async_listen_rest_url.py @@ -25,12 +25,12 @@ input_output = [ ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [URL1_SMART_FORMAT]}, ), ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": [URL1_SMART_FORMAT], "results.summary.short": [URL1_SUMMARIZE], diff --git a/tests/daily_test/test_daily_async_speak_rest.py b/tests/daily_test/test_daily_async_speak_rest.py index ddda2869..234674b9 100644 --- a/tests/daily_test/test_daily_async_speak_rest.py +++ b/tests/daily_test/test_daily_async_speak_rest.py @@ -23,7 +23,7 @@ ( TEXT1, SpeakOptions(model=TTS_MODEL, encoding="linear16", sample_rate=24000), - PrerecordedOptions(model="nova-2", smart_format=True), + 
PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [TEXT1]}, ), ] diff --git a/tests/daily_test/test_daily_listen_rest_file.py b/tests/daily_test/test_daily_listen_rest_file.py index 151850b0..f23a7282 100644 --- a/tests/daily_test/test_daily_listen_rest_file.py +++ b/tests/daily_test/test_daily_listen_rest_file.py @@ -23,12 +23,12 @@ input_output = [ ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT]}, ), ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT], "results.summary.short": [ diff --git a/tests/daily_test/test_daily_listen_rest_url.py b/tests/daily_test/test_daily_listen_rest_url.py index 98c42e42..3807591c 100644 --- a/tests/daily_test/test_daily_listen_rest_url.py +++ b/tests/daily_test/test_daily_listen_rest_url.py @@ -25,12 +25,12 @@ input_output = [ ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [URL1_SMART_FORMAT]}, ), ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": [URL1_SMART_FORMAT], "results.summary.short": [URL1_SUMMARIZE], diff --git a/tests/daily_test/test_daily_speak_rest.py b/tests/daily_test/test_daily_speak_rest.py index 11e56b1d..17d409f3 100644 --- a/tests/daily_test/test_daily_speak_rest.py +++ b/tests/daily_test/test_daily_speak_rest.py @@ -23,7 +23,7 @@ ( TEXT1, SpeakOptions(model=TTS_MODEL, encoding="linear16", sample_rate=24000), - PrerecordedOptions(model="nova-2", smart_format=True), + 
PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [TEXT1]}, ), ] diff --git a/tests/edge_cases/auto_flush/async_microphone_mute/main.py b/tests/edge_cases/auto_flush/async_microphone_mute/main.py index 8f0e8387..56182fe9 100644 --- a/tests/edge_cases/auto_flush/async_microphone_mute/main.py +++ b/tests/edge_cases/auto_flush/async_microphone_mute/main.py @@ -106,7 +106,7 @@ async def on_unhandled(self, unhandled, **kwargs): # connect to websocket options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", language="en-US", # Apply smart formatting to the output smart_format=True, diff --git a/tests/edge_cases/auto_flush/microphone_mute/main.py b/tests/edge_cases/auto_flush/microphone_mute/main.py index 6bacb1c8..2d340ebe 100644 --- a/tests/edge_cases/auto_flush/microphone_mute/main.py +++ b/tests/edge_cases/auto_flush/microphone_mute/main.py @@ -92,7 +92,7 @@ def on_unhandled(self, unhandled, **kwargs): dg_connection.on(LiveTranscriptionEvents.Unhandled, on_unhandled) options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", language="en-US", # Apply smart formatting to the output smart_format=True, diff --git a/tests/edge_cases/reconnect_same_object/async/main.py b/tests/edge_cases/reconnect_same_object/async/main.py index 81824d85..8db07944 100644 --- a/tests/edge_cases/reconnect_same_object/async/main.py +++ b/tests/edge_cases/reconnect_same_object/async/main.py @@ -25,7 +25,7 @@ async def main(): # config: DeepgramClientOptions = DeepgramClientOptions() deepgram: DeepgramClient = DeepgramClient("", config) options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", language="en-US", encoding="linear16", channels=1, diff --git a/tests/edge_cases/reconnect_same_object/sync/main.py b/tests/edge_cases/reconnect_same_object/sync/main.py index ca797c3b..52b7a3c8 100644 --- a/tests/edge_cases/reconnect_same_object/sync/main.py +++ 
b/tests/edge_cases/reconnect_same_object/sync/main.py @@ -10,7 +10,7 @@ def main(): # config: DeepgramClientOptions = DeepgramClientOptions() deepgram: DeepgramClient = DeepgramClient("", config) options: LiveOptions = LiveOptions( - model="nova-2", + model="nova-3", language="en-US", encoding="linear16", channels=1, diff --git a/tests/edge_cases/usage_to_fast/main.py b/tests/edge_cases/usage_to_fast/main.py index 474ffb9a..42af7210 100644 --- a/tests/edge_cases/usage_to_fast/main.py +++ b/tests/edge_cases/usage_to_fast/main.py @@ -52,7 +52,7 @@ def main(): options: PrerecordedOptions = PrerecordedOptions( callback="http://example.com", - model="nova-2", + model="nova-3", smart_format=True, utterances=True, punctuate=True, diff --git a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json index 66dd9bf2..f188b261 100644 --- a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json +++ b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json @@ -1 +1,4 @@ -{"model": "nova-2", "smart_format": true} \ No newline at end of file +{ + "model": "nova-3", + "smart_format": true +} \ No newline at end of file diff --git a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json 
b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json index 64dda057..be69902c 100644 --- a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json +++ b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json @@ -1 +1,262 @@ -{"metadata": {"transaction_key": "deprecated", "request_id": "e171e35d-e7d5-4bc3-9467-16186ea7f37b", "sha256": "5324da68ede209a16ac69a38e8cd29cee4d754434a041166cda3a1f5e0b24566", "created": "2024-08-02T09:02:26.026Z", "duration": 17.566313, "channels": 1, "models": ["30089e05-99d1-4376-b32e-c263170674af"], "model_info": {"30089e05-99d1-4376-b32e-c263170674af": {"name": "2-general-nova", "version": "2024-01-09.29447", "arch": "nova-2"}}}, "results": {"channels": [{"alternatives": [{"transcript": "Yep. I said it before and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it.", "confidence": 0.9982276, "words": [{"word": "yep", "start": 5.52, "end": 6.02, "confidence": 0.9984803, "punctuated_word": "Yep."}, {"word": "i", "start": 7.095, "end": 7.2549996, "confidence": 0.8246853, "punctuated_word": "I"}, {"word": "said", "start": 7.2549996, "end": 7.415, "confidence": 0.9356655, "punctuated_word": "said"}, {"word": "it", "start": 7.415, "end": 7.495, "confidence": 0.9983712, "punctuated_word": "it"}, {"word": "before", "start": 7.495, "end": 7.975, "confidence": 0.9997557, "punctuated_word": "before"}, {"word": "and", "start": 7.975, "end": 8.135, "confidence": 0.5637248, "punctuated_word": "and"}, {"word": "i'll", "start": 8.135, "end": 8.295, "confidence": 0.9982276, "punctuated_word": "I'll"}, {"word": "say", "start": 8.295, "end": 8.455, "confidence": 0.9986798, "punctuated_word": "say"}, {"word": "it", "start": 8.455, "end": 8.615, "confidence": 0.99852693, "punctuated_word": "it"}, {"word": "again", "start": 8.615, "end": 9.115, "confidence": 0.8460283, "punctuated_word": "again."}, {"word": "life", "start": 9.975, "end": 10.295, "confidence": 0.9956424, "punctuated_word": "Life"}, {"word": "moves", "start": 10.295, "end": 10.695, "confidence": 0.99854505, "punctuated_word": "moves"}, {"word": "pretty", "start": 10.695, "end": 11.014999, "confidence": 0.999373, "punctuated_word": "pretty"}, {"word": "fast", "start": 11.014999, "end": 11.514999, "confidence": 0.99929106, "punctuated_word": "fast."}, {"word": "you", "start": 11.975, "end": 12.215, "confidence": 0.9474222, "punctuated_word": "You"}, {"word": "don't", "start": 12.215, "end": 12.455, "confidence": 0.99980545, "punctuated_word": "don't"}, {"word": "stop", "start": 12.455, "end": 12.695, "confidence": 0.9998281, "punctuated_word": "stop"}, {"word": "and", "start": 12.695, "end": 12.855, "confidence": 0.99847394, "punctuated_word": "and"}, {"word": "look", "start": 12.855, "end": 13.014999, "confidence": 
0.99972683, "punctuated_word": "look"}, {"word": "around", "start": 13.014999, "end": 13.334999, "confidence": 0.9994735, "punctuated_word": "around"}, {"word": "once", "start": 13.334999, "end": 13.575, "confidence": 0.99803585, "punctuated_word": "once"}, {"word": "in", "start": 13.575, "end": 13.735, "confidence": 0.99710685, "punctuated_word": "in"}, {"word": "a", "start": 13.735, "end": 13.815, "confidence": 0.95409834, "punctuated_word": "a"}, {"word": "while", "start": 13.815, "end": 14.315, "confidence": 0.9718096, "punctuated_word": "while,"}, {"word": "you", "start": 14.561313, "end": 14.7213125, "confidence": 0.9899907, "punctuated_word": "you"}, {"word": "could", "start": 14.7213125, "end": 14.961312, "confidence": 0.9966583, "punctuated_word": "could"}, {"word": "miss", "start": 14.961312, "end": 15.461312, "confidence": 0.99738425, "punctuated_word": "miss"}, {"word": "it", "start": 17.281313, "end": 17.566313, "confidence": 0.99003756, "punctuated_word": "it."}], "paragraphs": {"transcript": "\nYep. I said it before and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it.", "paragraphs": [{"sentences": [{"text": "Yep.", "start": 5.52, "end": 6.02}, {"text": "I said it before and I'll say it again.", "start": 7.095, "end": 9.115}, {"text": "Life moves pretty fast.", "start": 9.975, "end": 11.514999}, {"text": "You don't stop and look around once in a while, you could miss it.", "start": 11.975, "end": 17.566313}], "start": 5.52, "end": 17.566313, "num_words": 28}]}}]}]}} \ No newline at end of file +{ + "metadata": { + "transaction_key": "deprecated", + "request_id": "e171e35d-e7d5-4bc3-9467-16186ea7f37b", + "sha256": "5324da68ede209a16ac69a38e8cd29cee4d754434a041166cda3a1f5e0b24566", + "created": "2024-08-02T09:02:26.026Z", + "duration": 17.566313, + "channels": 1, + "models": [ + "30089e05-99d1-4376-b32e-c263170674af" + ], + "model_info": { + "30089e05-99d1-4376-b32e-c263170674af": { + "name": "general-nova-3", + "version": "2024-01-09.29447", + "arch": "nova-3" + } + } + }, + "results": { + "channels": [ + { + "alternatives": [ + { + "transcript": "Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", + "confidence": 0.9982276, + "words": [ + { + "word": "yep", + "start": 5.52, + "end": 6.02, + "confidence": 0.9984803, + "punctuated_word": "Yep." 
+ }, + { + "word": "i", + "start": 7.095, + "end": 7.2549996, + "confidence": 0.8246853, + "punctuated_word": "I" + }, + { + "word": "said", + "start": 7.2549996, + "end": 7.415, + "confidence": 0.9356655, + "punctuated_word": "said" + }, + { + "word": "it", + "start": 7.415, + "end": 7.495, + "confidence": 0.9983712, + "punctuated_word": "it" + }, + { + "word": "before", + "start": 7.495, + "end": 7.975, + "confidence": 0.9997557, + "punctuated_word": "before" + }, + { + "word": "and", + "start": 7.975, + "end": 8.135, + "confidence": 0.5637248, + "punctuated_word": "and" + }, + { + "word": "i'll", + "start": 8.135, + "end": 8.295, + "confidence": 0.9982276, + "punctuated_word": "I'll" + }, + { + "word": "say", + "start": 8.295, + "end": 8.455, + "confidence": 0.9986798, + "punctuated_word": "say" + }, + { + "word": "it", + "start": 8.455, + "end": 8.615, + "confidence": 0.99852693, + "punctuated_word": "it" + }, + { + "word": "again", + "start": 8.615, + "end": 9.115, + "confidence": 0.8460283, + "punctuated_word": "again." + }, + { + "word": "life", + "start": 9.975, + "end": 10.295, + "confidence": 0.9956424, + "punctuated_word": "Life" + }, + { + "word": "moves", + "start": 10.295, + "end": 10.695, + "confidence": 0.99854505, + "punctuated_word": "moves" + }, + { + "word": "pretty", + "start": 10.695, + "end": 11.014999, + "confidence": 0.999373, + "punctuated_word": "pretty" + }, + { + "word": "fast", + "start": 11.014999, + "end": 11.514999, + "confidence": 0.99929106, + "punctuated_word": "fast." 
+ }, + { + "word": "you", + "start": 11.975, + "end": 12.215, + "confidence": 0.9474222, + "punctuated_word": "You" + }, + { + "word": "don't", + "start": 12.215, + "end": 12.455, + "confidence": 0.99980545, + "punctuated_word": "don't" + }, + { + "word": "stop", + "start": 12.455, + "end": 12.695, + "confidence": 0.9998281, + "punctuated_word": "stop" + }, + { + "word": "and", + "start": 12.695, + "end": 12.855, + "confidence": 0.99847394, + "punctuated_word": "and" + }, + { + "word": "look", + "start": 12.855, + "end": 13.014999, + "confidence": 0.99972683, + "punctuated_word": "look" + }, + { + "word": "around", + "start": 13.014999, + "end": 13.334999, + "confidence": 0.9994735, + "punctuated_word": "around" + }, + { + "word": "once", + "start": 13.334999, + "end": 13.575, + "confidence": 0.99803585, + "punctuated_word": "once" + }, + { + "word": "in", + "start": 13.575, + "end": 13.735, + "confidence": 0.99710685, + "punctuated_word": "in" + }, + { + "word": "a", + "start": 13.735, + "end": 13.815, + "confidence": 0.95409834, + "punctuated_word": "a" + }, + { + "word": "while", + "start": 13.815, + "end": 14.315, + "confidence": 0.9718096, + "punctuated_word": "while," + }, + { + "word": "you", + "start": 14.561313, + "end": 14.7213125, + "confidence": 0.9899907, + "punctuated_word": "you" + }, + { + "word": "could", + "start": 14.7213125, + "end": 14.961312, + "confidence": 0.9966583, + "punctuated_word": "could" + }, + { + "word": "miss", + "start": 14.961312, + "end": 15.461312, + "confidence": 0.99738425, + "punctuated_word": "miss" + }, + { + "word": "it", + "start": 17.281313, + "end": 17.566313, + "confidence": 0.99003756, + "punctuated_word": "it." + } + ], + "paragraphs": { + "transcript": "\nYep. I said it before and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it.", + "paragraphs": [ + { + "sentences": [ + { + "text": "Yep.", + "start": 5.52, + "end": 6.02 + }, + { + "text": "I said it before and I'll say it again.", + "start": 7.095, + "end": 9.115 + }, + { + "text": "Life moves pretty fast.", + "start": 9.975, + "end": 11.514999 + }, + { + "text": "You don't stop and look around once in a while, you could miss it.", + "start": 11.975, + "end": 17.566313 + } + ], + "start": 5.52, + "end": 17.566313, + "num_words": 28 + } + ] + } + } + ] + } + ] + } +} \ No newline at end of file diff --git a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json index 66dd9bf2..f188b261 100644 --- a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json +++ b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json @@ -1 +1,4 @@ -{"model": "nova-2", "smart_format": true} \ No newline at end of file +{ + "model": "nova-3", + "smart_format": true +} \ No newline at end of file diff --git a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json index 36553e57..a5bc70ef 100644 --- 
a/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json +++ b/tests/response_data/listen/rest/b00dc103a62ea2ccfc752ec0f646c7528ef5e729a9d7481d2a944253a9128ce2-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json @@ -1 +1,415 @@ -{"metadata": {"transaction_key": "deprecated", "request_id": "5d5d60ea-c711-4ca1-822f-6427d7227119", "sha256": "95dc40091b6a8456a1554ddfc4f163768217afd66bee70a10c74bb52805cd0d9", "created": "2024-08-02T09:02:23.218Z", "duration": 19.097937, "channels": 1, "models": ["30089e05-99d1-4376-b32e-c263170674af"], "model_info": {"30089e05-99d1-4376-b32e-c263170674af": {"name": "2-general-nova", "version": "2024-01-09.29447", "arch": "nova-2"}}}, "results": {"channels": [{"alternatives": [{"transcript": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", "confidence": 0.99925333, "words": [{"word": "we", "start": 0.32, "end": 0.64, "confidence": 0.9507078, "punctuated_word": "We,"}, {"word": "the", "start": 0.64, "end": 0.88, "confidence": 0.99701893, "punctuated_word": "the"}, {"word": "people", "start": 0.88, "end": 1.1999999, "confidence": 0.97335726, "punctuated_word": "people"}, {"word": "of", "start": 1.1999999, "end": 1.4399999, "confidence": 0.9990202, "punctuated_word": "of"}, {"word": "the", "start": 1.4399999, "end": 1.52, "confidence": 0.99912447, "punctuated_word": "the"}, {"word": "united", "start": 1.52, "end": 1.92, "confidence": 0.99964714, "punctuated_word": "United"}, {"word": "states", "start": 1.92, "end": 2.3999999, "confidence": 0.99164474, "punctuated_word": "States,"}, {"word": "in", "start": 2.3999999, 
"end": 2.56, "confidence": 0.9996747, "punctuated_word": "in"}, {"word": "order", "start": 2.56, "end": 2.8799999, "confidence": 0.99996233, "punctuated_word": "order"}, {"word": "to", "start": 2.8799999, "end": 3.04, "confidence": 0.9996681, "punctuated_word": "to"}, {"word": "form", "start": 3.04, "end": 3.28, "confidence": 0.9995727, "punctuated_word": "form"}, {"word": "a", "start": 3.28, "end": 3.36, "confidence": 0.9997249, "punctuated_word": "a"}, {"word": "more", "start": 3.36, "end": 3.6, "confidence": 0.9999571, "punctuated_word": "more"}, {"word": "perfect", "start": 3.6, "end": 3.9199998, "confidence": 0.99989784, "punctuated_word": "perfect"}, {"word": "union", "start": 3.9199998, "end": 4.42, "confidence": 0.9926925, "punctuated_word": "union,"}, {"word": "establish", "start": 4.72, "end": 5.22, "confidence": 0.9708728, "punctuated_word": "establish"}, {"word": "justice", "start": 5.2799997, "end": 5.7799997, "confidence": 0.9977604, "punctuated_word": "justice,"}, {"word": "ensure", "start": 6.0, "end": 6.3999996, "confidence": 0.92025393, "punctuated_word": "ensure"}, {"word": "domestic", "start": 6.3999996, "end": 6.8799996, "confidence": 0.98635405, "punctuated_word": "domestic"}, {"word": "tranquility", "start": 6.8799996, "end": 7.3799996, "confidence": 0.99842614, "punctuated_word": "tranquility,"}, {"word": "provide", "start": 7.9199996, "end": 8.24, "confidence": 0.99980646, "punctuated_word": "provide"}, {"word": "for", "start": 8.24, "end": 8.48, "confidence": 0.999933, "punctuated_word": "for"}, {"word": "the", "start": 8.48, "end": 8.559999, "confidence": 0.9998996, "punctuated_word": "the"}, {"word": "common", "start": 8.559999, "end": 8.88, "confidence": 0.99925333, "punctuated_word": "common"}, {"word": "defense", "start": 8.88, "end": 9.355, "confidence": 0.9988564, "punctuated_word": "defense,"}, {"word": "promote", "start": 9.594999, "end": 9.915, "confidence": 0.9908298, "punctuated_word": "promote"}, {"word": "the", "start": 
9.915, "end": 10.075, "confidence": 0.9994215, "punctuated_word": "the"}, {"word": "general", "start": 10.075, "end": 10.554999, "confidence": 0.99770457, "punctuated_word": "general"}, {"word": "welfare", "start": 10.554999, "end": 10.955, "confidence": 0.9615764, "punctuated_word": "welfare,"}, {"word": "and", "start": 10.955, "end": 11.195, "confidence": 0.9998332, "punctuated_word": "and"}, {"word": "secure", "start": 11.195, "end": 11.514999, "confidence": 0.99982953, "punctuated_word": "secure"}, {"word": "the", "start": 11.514999, "end": 11.674999, "confidence": 0.9998596, "punctuated_word": "the"}, {"word": "blessings", "start": 11.674999, "end": 11.994999, "confidence": 0.99888176, "punctuated_word": "blessings"}, {"word": "of", "start": 11.994999, "end": 12.235, "confidence": 0.99994814, "punctuated_word": "of"}, {"word": "liberty", "start": 12.235, "end": 12.714999, "confidence": 0.94853485, "punctuated_word": "liberty"}, {"word": "to", "start": 12.714999, "end": 12.875, "confidence": 0.998273, "punctuated_word": "to"}, {"word": "ourselves", "start": 12.875, "end": 13.355, "confidence": 0.9997156, "punctuated_word": "ourselves"}, {"word": "and", "start": 13.355, "end": 13.514999, "confidence": 0.87407845, "punctuated_word": "and"}, {"word": "our", "start": 13.514999, "end": 13.674999, "confidence": 0.9995127, "punctuated_word": "our"}, {"word": "posterity", "start": 13.674999, "end": 14.174999, "confidence": 0.855065, "punctuated_word": "posterity"}, {"word": "to", "start": 14.554999, "end": 14.795, "confidence": 0.60667473, "punctuated_word": "to"}, {"word": "ordain", "start": 14.795, "end": 15.195, "confidence": 0.99927837, "punctuated_word": "ordain"}, {"word": "and", "start": 15.195, "end": 15.434999, "confidence": 0.99926096, "punctuated_word": "and"}, {"word": "establish", "start": 15.434999, "end": 15.934999, "confidence": 0.9976635, "punctuated_word": "establish"}, {"word": "this", "start": 15.994999, "end": 16.234999, "confidence": 0.9996747, 
"punctuated_word": "this"}, {"word": "constitution", "start": 16.234999, "end": 16.734999, "confidence": 0.93758965, "punctuated_word": "constitution"}, {"word": "for", "start": 16.875, "end": 17.115, "confidence": 0.9990464, "punctuated_word": "for"}, {"word": "the", "start": 17.115, "end": 17.275, "confidence": 0.99990296, "punctuated_word": "the"}, {"word": "united", "start": 17.275, "end": 17.595, "confidence": 0.99956614, "punctuated_word": "United"}, {"word": "states", "start": 17.595, "end": 17.914999, "confidence": 0.9997973, "punctuated_word": "States"}, {"word": "of", "start": 17.914999, "end": 18.075, "confidence": 0.99959224, "punctuated_word": "of"}, {"word": "america", "start": 18.075, "end": 18.575, "confidence": 0.9946667, "punctuated_word": "America."}], "paragraphs": {"transcript": "\nWe, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", "paragraphs": [{"sentences": [{"text": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", "start": 0.32, "end": 18.575}], "start": 0.32, "end": 18.575, "num_words": 52}]}}]}]}} \ No newline at end of file +{ + "metadata": { + "transaction_key": "deprecated", + "request_id": "5d5d60ea-c711-4ca1-822f-6427d7227119", + "sha256": "95dc40091b6a8456a1554ddfc4f163768217afd66bee70a10c74bb52805cd0d9", + "created": "2024-08-02T09:02:23.218Z", + "duration": 19.097937, + "channels": 1, + "models": [ + "30089e05-99d1-4376-b32e-c263170674af" + ], + "model_info": { + 
"30089e05-99d1-4376-b32e-c263170674af": { + "name": "general-nova-3", + "version": "2024-01-09.29447", + "arch": "nova-3" + } + } + }, + "results": { + "channels": [ + { + "alternatives": [ + { + "transcript": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", + "confidence": 0.99925333, + "words": [ + { + "word": "we", + "start": 0.32, + "end": 0.64, + "confidence": 0.9507078, + "punctuated_word": "We," + }, + { + "word": "the", + "start": 0.64, + "end": 0.88, + "confidence": 0.99701893, + "punctuated_word": "the" + }, + { + "word": "people", + "start": 0.88, + "end": 1.1999999, + "confidence": 0.97335726, + "punctuated_word": "people" + }, + { + "word": "of", + "start": 1.1999999, + "end": 1.4399999, + "confidence": 0.9990202, + "punctuated_word": "of" + }, + { + "word": "the", + "start": 1.4399999, + "end": 1.52, + "confidence": 0.99912447, + "punctuated_word": "the" + }, + { + "word": "united", + "start": 1.52, + "end": 1.92, + "confidence": 0.99964714, + "punctuated_word": "United" + }, + { + "word": "states", + "start": 1.92, + "end": 2.3999999, + "confidence": 0.99164474, + "punctuated_word": "States," + }, + { + "word": "in", + "start": 2.3999999, + "end": 2.56, + "confidence": 0.9996747, + "punctuated_word": "in" + }, + { + "word": "order", + "start": 2.56, + "end": 2.8799999, + "confidence": 0.99996233, + "punctuated_word": "order" + }, + { + "word": "to", + "start": 2.8799999, + "end": 3.04, + "confidence": 0.9996681, + "punctuated_word": "to" + }, + { + "word": "form", + "start": 3.04, + "end": 3.28, + "confidence": 0.9995727, + "punctuated_word": "form" + }, + { + "word": "a", + "start": 3.28, + "end": 3.36, + "confidence": 0.9997249, + "punctuated_word": "a" + }, + { + "word": 
"more", + "start": 3.36, + "end": 3.6, + "confidence": 0.9999571, + "punctuated_word": "more" + }, + { + "word": "perfect", + "start": 3.6, + "end": 3.9199998, + "confidence": 0.99989784, + "punctuated_word": "perfect" + }, + { + "word": "union", + "start": 3.9199998, + "end": 4.42, + "confidence": 0.9926925, + "punctuated_word": "union," + }, + { + "word": "establish", + "start": 4.72, + "end": 5.22, + "confidence": 0.9708728, + "punctuated_word": "establish" + }, + { + "word": "justice", + "start": 5.2799997, + "end": 5.7799997, + "confidence": 0.9977604, + "punctuated_word": "justice," + }, + { + "word": "ensure", + "start": 6.0, + "end": 6.3999996, + "confidence": 0.92025393, + "punctuated_word": "ensure" + }, + { + "word": "domestic", + "start": 6.3999996, + "end": 6.8799996, + "confidence": 0.98635405, + "punctuated_word": "domestic" + }, + { + "word": "tranquility", + "start": 6.8799996, + "end": 7.3799996, + "confidence": 0.99842614, + "punctuated_word": "tranquility," + }, + { + "word": "provide", + "start": 7.9199996, + "end": 8.24, + "confidence": 0.99980646, + "punctuated_word": "provide" + }, + { + "word": "for", + "start": 8.24, + "end": 8.48, + "confidence": 0.999933, + "punctuated_word": "for" + }, + { + "word": "the", + "start": 8.48, + "end": 8.559999, + "confidence": 0.9998996, + "punctuated_word": "the" + }, + { + "word": "common", + "start": 8.559999, + "end": 8.88, + "confidence": 0.99925333, + "punctuated_word": "common" + }, + { + "word": "defense", + "start": 8.88, + "end": 9.355, + "confidence": 0.9988564, + "punctuated_word": "defense," + }, + { + "word": "promote", + "start": 9.594999, + "end": 9.915, + "confidence": 0.9908298, + "punctuated_word": "promote" + }, + { + "word": "the", + "start": 9.915, + "end": 10.075, + "confidence": 0.9994215, + "punctuated_word": "the" + }, + { + "word": "general", + "start": 10.075, + "end": 10.554999, + "confidence": 0.99770457, + "punctuated_word": "general" + }, + { + "word": "welfare", + "start": 
10.554999, + "end": 10.955, + "confidence": 0.9615764, + "punctuated_word": "welfare," + }, + { + "word": "and", + "start": 10.955, + "end": 11.195, + "confidence": 0.9998332, + "punctuated_word": "and" + }, + { + "word": "secure", + "start": 11.195, + "end": 11.514999, + "confidence": 0.99982953, + "punctuated_word": "secure" + }, + { + "word": "the", + "start": 11.514999, + "end": 11.674999, + "confidence": 0.9998596, + "punctuated_word": "the" + }, + { + "word": "blessings", + "start": 11.674999, + "end": 11.994999, + "confidence": 0.99888176, + "punctuated_word": "blessings" + }, + { + "word": "of", + "start": 11.994999, + "end": 12.235, + "confidence": 0.99994814, + "punctuated_word": "of" + }, + { + "word": "liberty", + "start": 12.235, + "end": 12.714999, + "confidence": 0.94853485, + "punctuated_word": "liberty" + }, + { + "word": "to", + "start": 12.714999, + "end": 12.875, + "confidence": 0.998273, + "punctuated_word": "to" + }, + { + "word": "ourselves", + "start": 12.875, + "end": 13.355, + "confidence": 0.9997156, + "punctuated_word": "ourselves" + }, + { + "word": "and", + "start": 13.355, + "end": 13.514999, + "confidence": 0.87407845, + "punctuated_word": "and" + }, + { + "word": "our", + "start": 13.514999, + "end": 13.674999, + "confidence": 0.9995127, + "punctuated_word": "our" + }, + { + "word": "posterity", + "start": 13.674999, + "end": 14.174999, + "confidence": 0.855065, + "punctuated_word": "posterity" + }, + { + "word": "to", + "start": 14.554999, + "end": 14.795, + "confidence": 0.60667473, + "punctuated_word": "to" + }, + { + "word": "ordain", + "start": 14.795, + "end": 15.195, + "confidence": 0.99927837, + "punctuated_word": "ordain" + }, + { + "word": "and", + "start": 15.195, + "end": 15.434999, + "confidence": 0.99926096, + "punctuated_word": "and" + }, + { + "word": "establish", + "start": 15.434999, + "end": 15.934999, + "confidence": 0.9976635, + "punctuated_word": "establish" + }, + { + "word": "this", + "start": 15.994999, + 
"end": 16.234999, + "confidence": 0.9996747, + "punctuated_word": "this" + }, + { + "word": "constitution", + "start": 16.234999, + "end": 16.734999, + "confidence": 0.93758965, + "punctuated_word": "constitution" + }, + { + "word": "for", + "start": 16.875, + "end": 17.115, + "confidence": 0.9990464, + "punctuated_word": "for" + }, + { + "word": "the", + "start": 17.115, + "end": 17.275, + "confidence": 0.99990296, + "punctuated_word": "the" + }, + { + "word": "united", + "start": 17.275, + "end": 17.595, + "confidence": 0.99956614, + "punctuated_word": "United" + }, + { + "word": "states", + "start": 17.595, + "end": 17.914999, + "confidence": 0.9997973, + "punctuated_word": "States" + }, + { + "word": "of", + "start": 17.914999, + "end": 18.075, + "confidence": 0.99959224, + "punctuated_word": "of" + }, + { + "word": "america", + "start": 18.075, + "end": 18.575, + "confidence": 0.9946667, + "punctuated_word": "America." + } + ], + "paragraphs": { + "transcript": "\nWe, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", + "paragraphs": [ + { + "sentences": [ + { + "text": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", + "start": 0.32, + "end": 18.575 + } + ], + "start": 0.32, + "end": 18.575, + "num_words": 52 + } + ] + } + } + ] + } + ] + } +} \ No newline at end of file diff --git 
a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json index 1921e812..6904feba 100644 --- a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json +++ b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json @@ -1 +1,5 @@ -{"model": "nova-2", "smart_format": true, "summarize": "v2"} \ No newline at end of file +{ + "model": "nova-3", + "smart_format": true, + "summarize": "v2" +} \ No newline at end of file diff --git a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json index 5b7e2fdb..f26d4337 100644 --- a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json +++ b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json @@ -1 +1,271 @@ -{"metadata": {"transaction_key": "deprecated", "request_id": "87b489fa-8fec-4782-9428-8f45f1c0601e", "sha256": "5324da68ede209a16ac69a38e8cd29cee4d754434a041166cda3a1f5e0b24566", "created": "2024-08-02T09:02:27.158Z", "duration": 17.566313, "channels": 1, "models": 
["30089e05-99d1-4376-b32e-c263170674af"], "model_info": {"30089e05-99d1-4376-b32e-c263170674af": {"name": "2-general-nova", "version": "2024-01-09.29447", "arch": "nova-2"}}, "summary_info": {"input_tokens": 0, "output_tokens": 0, "model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a"}}, "results": {"channels": [{"alternatives": [{"transcript": "Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", "confidence": 0.9982247, "words": [{"word": "yep", "start": 5.52, "end": 6.02, "confidence": 0.9984784, "punctuated_word": "Yep."}, {"word": "i", "start": 7.095, "end": 7.2549996, "confidence": 0.8250077, "punctuated_word": "I"}, {"word": "said", "start": 7.2549996, "end": 7.415, "confidence": 0.93559414, "punctuated_word": "said"}, {"word": "it", "start": 7.415, "end": 7.495, "confidence": 0.99837035, "punctuated_word": "it"}, {"word": "before", "start": 7.495, "end": 7.975, "confidence": 0.9997557, "punctuated_word": "before"}, {"word": "and", "start": 7.975, "end": 8.135, "confidence": 0.56378216, "punctuated_word": "and"}, {"word": "i'll", "start": 8.135, "end": 8.295, "confidence": 0.9982247, "punctuated_word": "I'll"}, {"word": "say", "start": 8.295, "end": 8.455, "confidence": 0.99867934, "punctuated_word": "say"}, {"word": "it", "start": 8.455, "end": 8.615, "confidence": 0.99852437, "punctuated_word": "it"}, {"word": "again", "start": 8.615, "end": 9.115, "confidence": 0.84590423, "punctuated_word": "again."}, {"word": "life", "start": 9.975, "end": 10.295, "confidence": 0.9956418, "punctuated_word": "Life"}, {"word": "moves", "start": 10.295, "end": 10.695, "confidence": 0.9985448, "punctuated_word": "moves"}, {"word": "pretty", "start": 10.695, "end": 11.014999, "confidence": 0.99937254, "punctuated_word": "pretty"}, {"word": "fast", "start": 11.014999, "end": 11.514999, "confidence": 0.9992908, "punctuated_word": "fast."}, {"word": "you", "start": 11.975, "end": 12.215, 
"confidence": 0.9473312, "punctuated_word": "You"}, {"word": "don't", "start": 12.215, "end": 12.455, "confidence": 0.9998054, "punctuated_word": "don't"}, {"word": "stop", "start": 12.455, "end": 12.695, "confidence": 0.9998281, "punctuated_word": "stop"}, {"word": "and", "start": 12.695, "end": 12.855, "confidence": 0.99847513, "punctuated_word": "and"}, {"word": "look", "start": 12.855, "end": 13.014999, "confidence": 0.99972683, "punctuated_word": "look"}, {"word": "around", "start": 13.014999, "end": 13.334999, "confidence": 0.99947375, "punctuated_word": "around"}, {"word": "once", "start": 13.334999, "end": 13.575, "confidence": 0.9980363, "punctuated_word": "once"}, {"word": "in", "start": 13.575, "end": 13.735, "confidence": 0.9971052, "punctuated_word": "in"}, {"word": "a", "start": 13.735, "end": 13.815, "confidence": 0.95409375, "punctuated_word": "a"}, {"word": "while", "start": 13.815, "end": 14.315, "confidence": 0.97181904, "punctuated_word": "while,"}, {"word": "you", "start": 14.561313, "end": 14.7213125, "confidence": 0.9899934, "punctuated_word": "you"}, {"word": "could", "start": 14.7213125, "end": 14.961312, "confidence": 0.9966538, "punctuated_word": "could"}, {"word": "miss", "start": 14.961312, "end": 15.461312, "confidence": 0.9973839, "punctuated_word": "miss"}, {"word": "it", "start": 17.281313, "end": 17.566313, "confidence": 0.9900253, "punctuated_word": "it."}], "paragraphs": {"transcript": "\nYep. I said it before and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it.", "paragraphs": [{"sentences": [{"text": "Yep.", "start": 5.52, "end": 6.02}, {"text": "I said it before and I'll say it again.", "start": 7.095, "end": 9.115}, {"text": "Life moves pretty fast.", "start": 9.975, "end": 11.514999}, {"text": "You don't stop and look around once in a while, you could miss it.", "start": 11.975, "end": 17.566313}], "start": 5.52, "end": 17.566313, "num_words": 28}]}}]}], "summary": {"result": "success", "short": "Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it."}}} \ No newline at end of file +{ + "metadata": { + "transaction_key": "deprecated", + "request_id": "87b489fa-8fec-4782-9428-8f45f1c0601e", + "sha256": "5324da68ede209a16ac69a38e8cd29cee4d754434a041166cda3a1f5e0b24566", + "created": "2024-08-02T09:02:27.158Z", + "duration": 17.566313, + "channels": 1, + "models": [ + "30089e05-99d1-4376-b32e-c263170674af" + ], + "model_info": { + "30089e05-99d1-4376-b32e-c263170674af": { + "name": "2-general-nova", + "version": "2024-01-09.29447", + "arch": "nova-3" + } + }, + "summary_info": { + "input_tokens": 0, + "output_tokens": 0, + "model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a" + } + }, + "results": { + "channels": [ + { + "alternatives": [ + { + "transcript": "Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", + "confidence": 0.9982247, + "words": [ + { + "word": "yep", + "start": 5.52, + "end": 6.02, + "confidence": 0.9984784, + "punctuated_word": "Yep." 
+ }, + { + "word": "i", + "start": 7.095, + "end": 7.2549996, + "confidence": 0.8250077, + "punctuated_word": "I" + }, + { + "word": "said", + "start": 7.2549996, + "end": 7.415, + "confidence": 0.93559414, + "punctuated_word": "said" + }, + { + "word": "it", + "start": 7.415, + "end": 7.495, + "confidence": 0.99837035, + "punctuated_word": "it" + }, + { + "word": "before", + "start": 7.495, + "end": 7.975, + "confidence": 0.9997557, + "punctuated_word": "before" + }, + { + "word": "and", + "start": 7.975, + "end": 8.135, + "confidence": 0.56378216, + "punctuated_word": "and" + }, + { + "word": "i'll", + "start": 8.135, + "end": 8.295, + "confidence": 0.9982247, + "punctuated_word": "I'll" + }, + { + "word": "say", + "start": 8.295, + "end": 8.455, + "confidence": 0.99867934, + "punctuated_word": "say" + }, + { + "word": "it", + "start": 8.455, + "end": 8.615, + "confidence": 0.99852437, + "punctuated_word": "it" + }, + { + "word": "again", + "start": 8.615, + "end": 9.115, + "confidence": 0.84590423, + "punctuated_word": "again." + }, + { + "word": "life", + "start": 9.975, + "end": 10.295, + "confidence": 0.9956418, + "punctuated_word": "Life" + }, + { + "word": "moves", + "start": 10.295, + "end": 10.695, + "confidence": 0.9985448, + "punctuated_word": "moves" + }, + { + "word": "pretty", + "start": 10.695, + "end": 11.014999, + "confidence": 0.99937254, + "punctuated_word": "pretty" + }, + { + "word": "fast", + "start": 11.014999, + "end": 11.514999, + "confidence": 0.9992908, + "punctuated_word": "fast." 
+ }, + { + "word": "you", + "start": 11.975, + "end": 12.215, + "confidence": 0.9473312, + "punctuated_word": "You" + }, + { + "word": "don't", + "start": 12.215, + "end": 12.455, + "confidence": 0.9998054, + "punctuated_word": "don't" + }, + { + "word": "stop", + "start": 12.455, + "end": 12.695, + "confidence": 0.9998281, + "punctuated_word": "stop" + }, + { + "word": "and", + "start": 12.695, + "end": 12.855, + "confidence": 0.99847513, + "punctuated_word": "and" + }, + { + "word": "look", + "start": 12.855, + "end": 13.014999, + "confidence": 0.99972683, + "punctuated_word": "look" + }, + { + "word": "around", + "start": 13.014999, + "end": 13.334999, + "confidence": 0.99947375, + "punctuated_word": "around" + }, + { + "word": "once", + "start": 13.334999, + "end": 13.575, + "confidence": 0.9980363, + "punctuated_word": "once" + }, + { + "word": "in", + "start": 13.575, + "end": 13.735, + "confidence": 0.9971052, + "punctuated_word": "in" + }, + { + "word": "a", + "start": 13.735, + "end": 13.815, + "confidence": 0.95409375, + "punctuated_word": "a" + }, + { + "word": "while", + "start": 13.815, + "end": 14.315, + "confidence": 0.97181904, + "punctuated_word": "while," + }, + { + "word": "you", + "start": 14.561313, + "end": 14.7213125, + "confidence": 0.9899934, + "punctuated_word": "you" + }, + { + "word": "could", + "start": 14.7213125, + "end": 14.961312, + "confidence": 0.9966538, + "punctuated_word": "could" + }, + { + "word": "miss", + "start": 14.961312, + "end": 15.461312, + "confidence": 0.9973839, + "punctuated_word": "miss" + }, + { + "word": "it", + "start": 17.281313, + "end": 17.566313, + "confidence": 0.9900253, + "punctuated_word": "it." + } + ], + "paragraphs": { + "transcript": "\nYep. I said it before and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it.", + "paragraphs": [ + { + "sentences": [ + { + "text": "Yep.", + "start": 5.52, + "end": 6.02 + }, + { + "text": "I said it before and I'll say it again.", + "start": 7.095, + "end": 9.115 + }, + { + "text": "Life moves pretty fast.", + "start": 9.975, + "end": 11.514999 + }, + { + "text": "You don't stop and look around once in a while, you could miss it.", + "start": 11.975, + "end": 17.566313 + } + ], + "start": 5.52, + "end": 17.566313, + "num_words": 28 + } + ] + } + } + ] + } + ], + "summary": { + "result": "success", + "short": "Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it." + } + } +} \ No newline at end of file diff --git a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json index 1921e812..6904feba 100644 --- a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json +++ b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json @@ -1 +1,5 @@ -{"model": "nova-2", "smart_format": true, "summarize": "v2"} \ No newline at end of file +{ + "model": "nova-3", + "smart_format": true, + "summarize": "v2" +} \ No newline at end of file diff --git a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json 
b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json index a11e93a4..4723e5f3 100644 --- a/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json +++ b/tests/response_data/listen/rest/f3b6208a662156067a41bddd295a1a0a53ea34a268e27a8f1a9d7107aa99732f-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json @@ -1 +1,424 @@ -{"metadata": {"transaction_key": "deprecated", "request_id": "3cba8b9c-fb4b-4938-8324-5c50ffa2a946", "sha256": "95dc40091b6a8456a1554ddfc4f163768217afd66bee70a10c74bb52805cd0d9", "created": "2024-08-02T09:02:24.760Z", "duration": 19.097937, "channels": 1, "models": ["30089e05-99d1-4376-b32e-c263170674af"], "model_info": {"30089e05-99d1-4376-b32e-c263170674af": {"name": "2-general-nova", "version": "2024-01-09.29447", "arch": "nova-2"}}, "summary_info": {"input_tokens": 63, "output_tokens": 53, "model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a"}}, "results": {"channels": [{"alternatives": [{"transcript": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", "confidence": 0.99925345, "words": [{"word": "we", "start": 0.32, "end": 0.64, "confidence": 0.95071673, "punctuated_word": "We,"}, {"word": "the", "start": 0.64, "end": 0.88, "confidence": 0.9970252, "punctuated_word": "the"}, {"word": "people", "start": 0.88, "end": 1.1999999, "confidence": 0.97336787, "punctuated_word": "people"}, {"word": "of", "start": 1.1999999, "end": 1.4399999, "confidence": 0.9990214, "punctuated_word": "of"}, {"word": "the", "start": 
1.4399999, "end": 1.52, "confidence": 0.9991234, "punctuated_word": "the"}, {"word": "united", "start": 1.52, "end": 1.92, "confidence": 0.9996474, "punctuated_word": "United"}, {"word": "states", "start": 1.92, "end": 2.3999999, "confidence": 0.9916526, "punctuated_word": "States,"}, {"word": "in", "start": 2.3999999, "end": 2.56, "confidence": 0.9996748, "punctuated_word": "in"}, {"word": "order", "start": 2.56, "end": 2.8799999, "confidence": 0.99996233, "punctuated_word": "order"}, {"word": "to", "start": 2.8799999, "end": 3.04, "confidence": 0.99966836, "punctuated_word": "to"}, {"word": "form", "start": 3.04, "end": 3.28, "confidence": 0.99957234, "punctuated_word": "form"}, {"word": "a", "start": 3.28, "end": 3.36, "confidence": 0.99972504, "punctuated_word": "a"}, {"word": "more", "start": 3.36, "end": 3.6, "confidence": 0.9999572, "punctuated_word": "more"}, {"word": "perfect", "start": 3.6, "end": 3.9199998, "confidence": 0.99989796, "punctuated_word": "perfect"}, {"word": "union", "start": 3.9199998, "end": 4.42, "confidence": 0.9926959, "punctuated_word": "union,"}, {"word": "establish", "start": 4.72, "end": 5.22, "confidence": 0.9708791, "punctuated_word": "establish"}, {"word": "justice", "start": 5.2799997, "end": 5.7799997, "confidence": 0.9977596, "punctuated_word": "justice,"}, {"word": "ensure", "start": 6.0, "end": 6.3999996, "confidence": 0.92022455, "punctuated_word": "ensure"}, {"word": "domestic", "start": 6.3999996, "end": 6.8799996, "confidence": 0.9863414, "punctuated_word": "domestic"}, {"word": "tranquility", "start": 6.8799996, "end": 7.3799996, "confidence": 0.9984266, "punctuated_word": "tranquility,"}, {"word": "provide", "start": 7.9199996, "end": 8.24, "confidence": 0.9998066, "punctuated_word": "provide"}, {"word": "for", "start": 8.24, "end": 8.48, "confidence": 0.99993324, "punctuated_word": "for"}, {"word": "the", "start": 8.48, "end": 8.559999, "confidence": 0.9998996, "punctuated_word": "the"}, {"word": "common", "start": 
8.559999, "end": 8.88, "confidence": 0.99925345, "punctuated_word": "common"}, {"word": "defense", "start": 8.88, "end": 9.355, "confidence": 0.99885684, "punctuated_word": "defense,"}, {"word": "promote", "start": 9.594999, "end": 9.915, "confidence": 0.9908229, "punctuated_word": "promote"}, {"word": "the", "start": 9.915, "end": 10.075, "confidence": 0.9994222, "punctuated_word": "the"}, {"word": "general", "start": 10.075, "end": 10.554999, "confidence": 0.99770135, "punctuated_word": "general"}, {"word": "welfare", "start": 10.554999, "end": 10.955, "confidence": 0.9617263, "punctuated_word": "welfare,"}, {"word": "and", "start": 10.955, "end": 11.195, "confidence": 0.99983335, "punctuated_word": "and"}, {"word": "secure", "start": 11.195, "end": 11.514999, "confidence": 0.99982953, "punctuated_word": "secure"}, {"word": "the", "start": 11.514999, "end": 11.674999, "confidence": 0.9998596, "punctuated_word": "the"}, {"word": "blessings", "start": 11.674999, "end": 11.994999, "confidence": 0.9988814, "punctuated_word": "blessings"}, {"word": "of", "start": 11.994999, "end": 12.235, "confidence": 0.99994814, "punctuated_word": "of"}, {"word": "liberty", "start": 12.235, "end": 12.714999, "confidence": 0.9485822, "punctuated_word": "liberty"}, {"word": "to", "start": 12.714999, "end": 12.875, "confidence": 0.9982722, "punctuated_word": "to"}, {"word": "ourselves", "start": 12.875, "end": 13.355, "confidence": 0.9997156, "punctuated_word": "ourselves"}, {"word": "and", "start": 13.355, "end": 13.514999, "confidence": 0.87418187, "punctuated_word": "and"}, {"word": "our", "start": 13.514999, "end": 13.674999, "confidence": 0.99951303, "punctuated_word": "our"}, {"word": "posterity", "start": 13.674999, "end": 14.174999, "confidence": 0.85497844, "punctuated_word": "posterity"}, {"word": "to", "start": 14.554999, "end": 14.795, "confidence": 0.60699797, "punctuated_word": "to"}, {"word": "ordain", "start": 14.795, "end": 15.195, "confidence": 0.9992792, 
"punctuated_word": "ordain"}, {"word": "and", "start": 15.195, "end": 15.434999, "confidence": 0.9992617, "punctuated_word": "and"}, {"word": "establish", "start": 15.434999, "end": 15.934999, "confidence": 0.99766684, "punctuated_word": "establish"}, {"word": "this", "start": 15.994999, "end": 16.234999, "confidence": 0.9996753, "punctuated_word": "this"}, {"word": "constitution", "start": 16.234999, "end": 16.734999, "confidence": 0.93753284, "punctuated_word": "constitution"}, {"word": "for", "start": 16.875, "end": 17.115, "confidence": 0.9990471, "punctuated_word": "for"}, {"word": "the", "start": 17.115, "end": 17.275, "confidence": 0.9999032, "punctuated_word": "the"}, {"word": "united", "start": 17.275, "end": 17.595, "confidence": 0.9995665, "punctuated_word": "United"}, {"word": "states", "start": 17.595, "end": 17.914999, "confidence": 0.99979764, "punctuated_word": "States"}, {"word": "of", "start": 17.914999, "end": 18.075, "confidence": 0.99959284, "punctuated_word": "of"}, {"word": "america", "start": 18.075, "end": 18.575, "confidence": 0.9946651, "punctuated_word": "America."}], "paragraphs": {"transcript": "\nWe, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", "paragraphs": [{"sentences": [{"text": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", "start": 0.32, "end": 18.575}], "start": 0.32, "end": 18.575, "num_words": 52}]}}]}], "summary": {"result": "success", "short": "Speaker 1 discusses the 
goal of establishing a more perfect union, justice, and the common defense for the United States, as part of the Better Union movement. They emphasize the importance of these factors in securing the benefits of liberty for the United States and the world."}}} \ No newline at end of file +{ + "metadata": { + "transaction_key": "deprecated", + "request_id": "3cba8b9c-fb4b-4938-8324-5c50ffa2a946", + "sha256": "95dc40091b6a8456a1554ddfc4f163768217afd66bee70a10c74bb52805cd0d9", + "created": "2024-08-02T09:02:24.760Z", + "duration": 19.097937, + "channels": 1, + "models": [ + "30089e05-99d1-4376-b32e-c263170674af" + ], + "model_info": { + "30089e05-99d1-4376-b32e-c263170674af": { + "name": "2-general-nova", + "version": "2024-01-09.29447", + "arch": "nova-3" + } + }, + "summary_info": { + "input_tokens": 63, + "output_tokens": 53, + "model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a" + } + }, + "results": { + "channels": [ + { + "alternatives": [ + { + "transcript": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", + "confidence": 0.99925345, + "words": [ + { + "word": "we", + "start": 0.32, + "end": 0.64, + "confidence": 0.95071673, + "punctuated_word": "We," + }, + { + "word": "the", + "start": 0.64, + "end": 0.88, + "confidence": 0.9970252, + "punctuated_word": "the" + }, + { + "word": "people", + "start": 0.88, + "end": 1.1999999, + "confidence": 0.97336787, + "punctuated_word": "people" + }, + { + "word": "of", + "start": 1.1999999, + "end": 1.4399999, + "confidence": 0.9990214, + "punctuated_word": "of" + }, + { + "word": "the", + "start": 1.4399999, + "end": 1.52, + "confidence": 0.9991234, + "punctuated_word": "the" + }, + { + "word": "united", + "start": 1.52, + "end": 1.92, + 
"confidence": 0.9996474, + "punctuated_word": "United" + }, + { + "word": "states", + "start": 1.92, + "end": 2.3999999, + "confidence": 0.9916526, + "punctuated_word": "States," + }, + { + "word": "in", + "start": 2.3999999, + "end": 2.56, + "confidence": 0.9996748, + "punctuated_word": "in" + }, + { + "word": "order", + "start": 2.56, + "end": 2.8799999, + "confidence": 0.99996233, + "punctuated_word": "order" + }, + { + "word": "to", + "start": 2.8799999, + "end": 3.04, + "confidence": 0.99966836, + "punctuated_word": "to" + }, + { + "word": "form", + "start": 3.04, + "end": 3.28, + "confidence": 0.99957234, + "punctuated_word": "form" + }, + { + "word": "a", + "start": 3.28, + "end": 3.36, + "confidence": 0.99972504, + "punctuated_word": "a" + }, + { + "word": "more", + "start": 3.36, + "end": 3.6, + "confidence": 0.9999572, + "punctuated_word": "more" + }, + { + "word": "perfect", + "start": 3.6, + "end": 3.9199998, + "confidence": 0.99989796, + "punctuated_word": "perfect" + }, + { + "word": "union", + "start": 3.9199998, + "end": 4.42, + "confidence": 0.9926959, + "punctuated_word": "union," + }, + { + "word": "establish", + "start": 4.72, + "end": 5.22, + "confidence": 0.9708791, + "punctuated_word": "establish" + }, + { + "word": "justice", + "start": 5.2799997, + "end": 5.7799997, + "confidence": 0.9977596, + "punctuated_word": "justice," + }, + { + "word": "ensure", + "start": 6.0, + "end": 6.3999996, + "confidence": 0.92022455, + "punctuated_word": "ensure" + }, + { + "word": "domestic", + "start": 6.3999996, + "end": 6.8799996, + "confidence": 0.9863414, + "punctuated_word": "domestic" + }, + { + "word": "tranquility", + "start": 6.8799996, + "end": 7.3799996, + "confidence": 0.9984266, + "punctuated_word": "tranquility," + }, + { + "word": "provide", + "start": 7.9199996, + "end": 8.24, + "confidence": 0.9998066, + "punctuated_word": "provide" + }, + { + "word": "for", + "start": 8.24, + "end": 8.48, + "confidence": 0.99993324, + "punctuated_word": 
"for" + }, + { + "word": "the", + "start": 8.48, + "end": 8.559999, + "confidence": 0.9998996, + "punctuated_word": "the" + }, + { + "word": "common", + "start": 8.559999, + "end": 8.88, + "confidence": 0.99925345, + "punctuated_word": "common" + }, + { + "word": "defense", + "start": 8.88, + "end": 9.355, + "confidence": 0.99885684, + "punctuated_word": "defense," + }, + { + "word": "promote", + "start": 9.594999, + "end": 9.915, + "confidence": 0.9908229, + "punctuated_word": "promote" + }, + { + "word": "the", + "start": 9.915, + "end": 10.075, + "confidence": 0.9994222, + "punctuated_word": "the" + }, + { + "word": "general", + "start": 10.075, + "end": 10.554999, + "confidence": 0.99770135, + "punctuated_word": "general" + }, + { + "word": "welfare", + "start": 10.554999, + "end": 10.955, + "confidence": 0.9617263, + "punctuated_word": "welfare," + }, + { + "word": "and", + "start": 10.955, + "end": 11.195, + "confidence": 0.99983335, + "punctuated_word": "and" + }, + { + "word": "secure", + "start": 11.195, + "end": 11.514999, + "confidence": 0.99982953, + "punctuated_word": "secure" + }, + { + "word": "the", + "start": 11.514999, + "end": 11.674999, + "confidence": 0.9998596, + "punctuated_word": "the" + }, + { + "word": "blessings", + "start": 11.674999, + "end": 11.994999, + "confidence": 0.9988814, + "punctuated_word": "blessings" + }, + { + "word": "of", + "start": 11.994999, + "end": 12.235, + "confidence": 0.99994814, + "punctuated_word": "of" + }, + { + "word": "liberty", + "start": 12.235, + "end": 12.714999, + "confidence": 0.9485822, + "punctuated_word": "liberty" + }, + { + "word": "to", + "start": 12.714999, + "end": 12.875, + "confidence": 0.9982722, + "punctuated_word": "to" + }, + { + "word": "ourselves", + "start": 12.875, + "end": 13.355, + "confidence": 0.9997156, + "punctuated_word": "ourselves" + }, + { + "word": "and", + "start": 13.355, + "end": 13.514999, + "confidence": 0.87418187, + "punctuated_word": "and" + }, + { + "word": "our", 
+ "start": 13.514999, + "end": 13.674999, + "confidence": 0.99951303, + "punctuated_word": "our" + }, + { + "word": "posterity", + "start": 13.674999, + "end": 14.174999, + "confidence": 0.85497844, + "punctuated_word": "posterity" + }, + { + "word": "to", + "start": 14.554999, + "end": 14.795, + "confidence": 0.60699797, + "punctuated_word": "to" + }, + { + "word": "ordain", + "start": 14.795, + "end": 15.195, + "confidence": 0.9992792, + "punctuated_word": "ordain" + }, + { + "word": "and", + "start": 15.195, + "end": 15.434999, + "confidence": 0.9992617, + "punctuated_word": "and" + }, + { + "word": "establish", + "start": 15.434999, + "end": 15.934999, + "confidence": 0.99766684, + "punctuated_word": "establish" + }, + { + "word": "this", + "start": 15.994999, + "end": 16.234999, + "confidence": 0.9996753, + "punctuated_word": "this" + }, + { + "word": "constitution", + "start": 16.234999, + "end": 16.734999, + "confidence": 0.93753284, + "punctuated_word": "constitution" + }, + { + "word": "for", + "start": 16.875, + "end": 17.115, + "confidence": 0.9990471, + "punctuated_word": "for" + }, + { + "word": "the", + "start": 17.115, + "end": 17.275, + "confidence": 0.9999032, + "punctuated_word": "the" + }, + { + "word": "united", + "start": 17.275, + "end": 17.595, + "confidence": 0.9995665, + "punctuated_word": "United" + }, + { + "word": "states", + "start": 17.595, + "end": 17.914999, + "confidence": 0.99979764, + "punctuated_word": "States" + }, + { + "word": "of", + "start": 17.914999, + "end": 18.075, + "confidence": 0.99959284, + "punctuated_word": "of" + }, + { + "word": "america", + "start": 18.075, + "end": 18.575, + "confidence": 0.9946651, + "punctuated_word": "America." 
+ } + ], + "paragraphs": { + "transcript": "\nWe, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", + "paragraphs": [ + { + "sentences": [ + { + "text": "We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America.", + "start": 0.32, + "end": 18.575 + } + ], + "start": 0.32, + "end": 18.575, + "num_words": 52 + } + ] + } + } + ] + } + ], + "summary": { + "result": "success", + "short": "Speaker 1 discusses the goal of establishing a more perfect union, justice, and the common defense for the United States, as part of the Better Union movement. They emphasize the importance of these factors in securing the benefits of liberty for the United States and the world." 
+ } + } +} \ No newline at end of file diff --git a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-options.json b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-options.json index fc8db31e..d82d499c 100644 --- a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-options.json +++ b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-options.json @@ -1 +1,9 @@ -{"channels": 1, "encoding": "mulaw", "language": "en-US", "model": "nova-2", "punctuate": true, "sample_rate": 8000, "smart_format": true} \ No newline at end of file +{ + "channels": 1, + "encoding": "mulaw", + "language": "en-US", + "model": "nova-3", + "punctuate": true, + "sample_rate": 8000, + "smart_format": true +} \ No newline at end of file diff --git a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-response.json b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-response.json index 181e6f6f..3d5b03b6 100644 --- a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-response.json +++ b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-42fc5ed98cabc1fa1a2f276301c27c46dd15f6f5187cd93d944cc94fa81c8469-response.json @@ -1 +1,73 @@ -{"channel": 
{"alternatives": [{"transcript": "For the United States of America.", "confidence": 0.9990865, "words": [{"word": "for", "start": 17.56, "end": 17.72, "confidence": 0.9873626, "punctuated_word": "For"}, {"word": "the", "start": 17.72, "end": 17.88, "confidence": 0.9990865, "punctuated_word": "the"}, {"word": "united", "start": 17.88, "end": 17.96, "confidence": 0.99964666, "punctuated_word": "United"}, {"word": "states", "start": 17.96, "end": 18.12, "confidence": 0.9992742, "punctuated_word": "States"}, {"word": "of", "start": 18.12, "end": 18.2, "confidence": 0.99879324, "punctuated_word": "of"}, {"word": "america", "start": 18.2, "end": 18.66, "confidence": 0.96024, "punctuated_word": "America."}]}]}, "metadata": {"model_info": {"name": "2-general-nova", "version": "2024-01-18.26916", "arch": "nova-2"}, "request_id": "e1985d01-b8bc-42ab-a16a-e7a8419c07f2", "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c"}, "type": "Results", "channel_index": [0, 1], "duration": 1.6599998, "start": 17.0, "is_final": true, "from_finalize": false, "speech_final": true} \ No newline at end of file +{ + "channel": { + "alternatives": [ + { + "transcript": "For the United States of America.", + "confidence": 0.9990865, + "words": [ + { + "word": "for", + "start": 17.56, + "end": 17.72, + "confidence": 0.9873626, + "punctuated_word": "For" + }, + { + "word": "the", + "start": 17.72, + "end": 17.88, + "confidence": 0.9990865, + "punctuated_word": "the" + }, + { + "word": "united", + "start": 17.88, + "end": 17.96, + "confidence": 0.99964666, + "punctuated_word": "United" + }, + { + "word": "states", + "start": 17.96, + "end": 18.12, + "confidence": 0.9992742, + "punctuated_word": "States" + }, + { + "word": "of", + "start": 18.12, + "end": 18.2, + "confidence": 0.99879324, + "punctuated_word": "of" + }, + { + "word": "america", + "start": 18.2, + "end": 18.66, + "confidence": 0.96024, + "punctuated_word": "America." 
+ } + ] + } + ] + }, + "metadata": { + "model_info": { + "name": "2-general-nova", + "version": "2024-01-18.26916", + "arch": "nova-3" + }, + "request_id": "e1985d01-b8bc-42ab-a16a-e7a8419c07f2", + "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c" + }, + "type": "Results", + "channel_index": [ + 0, + 1 + ], + "duration": 1.6599998, + "start": 17.0, + "is_final": true, + "from_finalize": false, + "speech_final": true +} \ No newline at end of file diff --git a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-options.json b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-options.json index fc8db31e..d82d499c 100644 --- a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-options.json +++ b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-options.json @@ -1 +1,9 @@ -{"channels": 1, "encoding": "mulaw", "language": "en-US", "model": "nova-2", "punctuate": true, "sample_rate": 8000, "smart_format": true} \ No newline at end of file +{ + "channels": 1, + "encoding": "mulaw", + "language": "en-US", + "model": "nova-3", + "punctuate": true, + "sample_rate": 8000, + "smart_format": true +} \ No newline at end of file diff --git a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-response.json b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-response.json index 
eb0acffb..dd35682f 100644 --- a/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-response.json +++ b/tests/response_data/listen/websocket/a6d1b12d5ce73a51a7b69ab156f0c98c72cdc1cfcf4a25f7b634c328cce4d760-d7334c26cf6468c191e05ff5e8151da9b67985c66ab177e9446fd14bbafd70df-response.json @@ -1 +1,59 @@ -{"channel": {"alternatives": [{"transcript": "Testing. 123. Testing. 123.", "confidence": 0.987885, "words": [{"word": "testing", "start": 1.22, "end": 1.62, "confidence": 0.483064, "punctuated_word": "Testing."}, {"word": "123", "start": 1.62, "end": 2.12, "confidence": 0.93632686, "punctuated_word": "123."}, {"word": "testing", "start": 2.1799998, "end": 2.6799998, "confidence": 0.987885, "punctuated_word": "Testing."}, {"word": "123", "start": 3.1399999, "end": 3.6399999, "confidence": 0.99418044, "punctuated_word": "123."}]}]}, "metadata": {"model_info": {"name": "2-general-nova", "version": "2024-01-18.26916", "arch": "nova-2"}, "request_id": "26dd8cc9-77e6-4db7-a0ae-3034c013cb64", "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c"}, "type": "Results", "channel_index": [0, 1], "duration": 3.08, "start": 0.74, "is_final": true, "from_finalize": false, "speech_final": true} \ No newline at end of file +{ + "channel": { + "alternatives": [ + { + "transcript": "Testing. 123. Testing. 123.", + "confidence": 0.987885, + "words": [ + { + "word": "testing", + "start": 1.22, + "end": 1.62, + "confidence": 0.483064, + "punctuated_word": "Testing." + }, + { + "word": "123", + "start": 1.62, + "end": 2.12, + "confidence": 0.93632686, + "punctuated_word": "123." + }, + { + "word": "testing", + "start": 2.1799998, + "end": 2.6799998, + "confidence": 0.987885, + "punctuated_word": "Testing." + }, + { + "word": "123", + "start": 3.1399999, + "end": 3.6399999, + "confidence": 0.99418044, + "punctuated_word": "123." 
+ } + ] + } + ] + }, + "metadata": { + "model_info": { + "name": "2-general-nova", + "version": "2024-01-18.26916", + "arch": "nova-3" + }, + "request_id": "26dd8cc9-77e6-4db7-a0ae-3034c013cb64", + "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c" + }, + "type": "Results", + "channel_index": [ + 0, + 1 + ], + "duration": 3.08, + "start": 0.74, + "is_final": true, + "from_finalize": false, + "speech_final": true +} \ No newline at end of file diff --git a/tests/unit_test/conversation.txt b/tests/unit_test/conversation.txt index 2af89e08..e2fe96a5 100644 --- a/tests/unit_test/conversation.txt +++ b/tests/unit_test/conversation.txt @@ -16,7 +16,7 @@ Thanks to ChatGPT and the advent of the LLM era, the conversational AI tech stac While these AI agents hold immense potential, many customers have expressed their dissatisfaction with the current crop of voice AI vendors, citing roadblocks related to speed, cost, reliability, and conversational quality. That’s why we’re excited to introduce our own text-to-speech (TTS) API, Deepgram Aura, built for real-time, conversational voice AI agents. -Whether used on its own or in conjunction with our industry-leading Nova-2 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. +Whether used on its own or in conjunction with our industry-leading Nova-3 speech-to-text API, we’ll soon provide developers with a complete speech AI platform, giving them the essential building blocks they need to build high throughput, real-time AI agents of the future. We are thrilled about the progress our initial group of developers has made using Aura, so much so that we are extending limited access to a select few partners who will be free to begin integrating with Aura immediately. 
With their feedback, we’ll continue to enhance our suite of voices and API features, as well as ensure a smooth launch of their production-grade applications. @@ -51,15 +51,15 @@ Here are some sample clips generated by one of the earliest iterations of Aura. Our Approach ---------- -For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. +For nearly a decade, we’ve worked tirelessly to advance the art of the possible in speech recognition and spoken language understanding. Along the way, we’ve transcribed trillions of spoken words into highly accurate transcriptions. Our model research team has developed novel transformer architectures equipped to deal with the nuances of conversational audio–across different languages, accents, and dialects, while handling disfluencies and the changing rhythms, tones, cadences, and inflections that occur in natural, back-and-forth conversations. -And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. +And all the while, we’ve purposefully built our models under limited constraints to optimize their speed and efficiency. 
With support for dozens of languages and custom model training, our technical team has trained and deployed thousands of speech AI models (more than anybody else) which we operate and manage for our customers each day using our own computing infrastructure. We also have our own in-house data labeling and data ops team with years of experience building bespoke workflows to record, store, and transfer vast amounts of audio in order to label it and continuously grow our bank of high-quality data (millions of hours and counting) used in our model training. These combined experiences have made us experts in processing and modeling speech audio, especially in support of streaming use cases with our real-time STT models. Our customers have been asking if we could apply the same approach for TTS, and we can. -So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-2 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. +So what can you expect from Aura? Delivering the same market-leading value and performance as Nova-3 does for STT. Aura is built to be the panacea for speed, quality, and efficiency–the fastest of the high-quality options, and the best quality of the fast ones. And that’s really what end users need and what our customers have been asking us to build. "Deepgram is a valued partner, providing our customers with high throughput speech-to-text that delivers unrivaled performance without tradeoffs between quality, speed, and cost. We're excited to see Deepgram extend their speech AI platform and bring this approach to the text-to-speech market." 
- Richard Dumas, VP AI Product Strategy at Five9 @@ -68,4 +68,4 @@ What's Next ---------- As we’ve discussed, scaled voice agents are a high throughput use case, and we believe their success will ultimately depend on a unified approach to audio, one that strikes the right balance between natural voice quality, responsiveness, and cost-efficiency. And with Aura, we’re just getting started. We’re looking forward to continuing to work with customers like Asurion and partners like Five9 across speech-to-text AND text-to-speech as we help them define the future of AI agents, and we invite you to join us on this journey. -We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. \ No newline at end of file +We expect to release generally early next year, but if you’re working on any real-time AI agent use cases, join our waitlist today to jumpstart your development in production as we continue to refine our model and API features with your direct feedback. 
\ No newline at end of file diff --git a/tests/unit_test/test_unit_async_listen_rest_file.py b/tests/unit_test/test_unit_async_listen_rest_file.py index f30d610e..68d741c2 100644 --- a/tests/unit_test/test_unit_async_listen_rest_file.py +++ b/tests/unit_test/test_unit_async_listen_rest_file.py @@ -14,7 +14,7 @@ from deepgram import DeepgramClient, PrerecordedOptions, FileSource from tests.utils import read_metadata_string, save_metadata_string -MODEL = "2-general-nova" +MODEL = "3-general-nova" # response constants FILE1 = "preamble-rest.wav" @@ -25,12 +25,12 @@ input_output = [ ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT]}, ), ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT], "results.summary.short": [ diff --git a/tests/unit_test/test_unit_async_listen_rest_url.py b/tests/unit_test/test_unit_async_listen_rest_url.py index 266f238a..10491e58 100644 --- a/tests/unit_test/test_unit_async_listen_rest_url.py +++ b/tests/unit_test/test_unit_async_listen_rest_url.py @@ -14,7 +14,7 @@ from deepgram import DeepgramClient, PrerecordedOptions, PrerecordedResponse from tests.utils import read_metadata_string, save_metadata_string -MODEL = "2-general-nova" +MODEL = "3-general-nova" # response constants URL1 = { @@ -27,12 +27,12 @@ input_output = [ ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [URL1_SMART_FORMAT1]}, ), ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": 
[URL1_SMART_FORMAT1], "results.summary.short": [URL1_SUMMARIZE1], diff --git a/tests/unit_test/test_unit_async_listen_websocket.py b/tests/unit_test/test_unit_async_listen_websocket.py index 003b2b99..55ce37e6 100644 --- a/tests/unit_test/test_unit_async_listen_websocket.py +++ b/tests/unit_test/test_unit_async_listen_websocket.py @@ -20,10 +20,10 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "3-general-nova" # response constants -INPUT1 = '{"channel": {"alternatives": [{"transcript": "Testing 123. Testing 123.", "confidence": 0.97866726, "words": [{"word": "testing", "start": 1.12, "end": 1.62, "confidence": 0.97866726, "punctuated_word": "Testing"}, {"word": "123", "start": 1.76, "end": 1.8399999, "confidence": 0.73616695, "punctuated_word": "123."}, {"word": "testing", "start": 1.8399999, "end": 2.34, "confidence": 0.99529773, "punctuated_word": "Testing"}, {"word": "123", "start": 2.8799999, "end": 3.3799999, "confidence": 0.9773819, "punctuated_word": "123."}]}]}, "metadata": {"model_info": {"name": "2-general-nova", "version": "2024-01-18.26916", "arch": "nova-2"}, "request_id": "0d2f1ddf-b9aa-40c9-a761-abcd8cf5734f", "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c"}, "type": "Results", "channel_index": [0, 1], "duration": 3.69, "start": 0.0, "is_final": true, "from_finalize": false, "speech_final": true}' +INPUT1 = '{"channel": {"alternatives": [{"transcript": "Testing 123. 
Testing 123.", "confidence": 0.97866726, "words": [{"word": "testing", "start": 1.12, "end": 1.62, "confidence": 0.97866726, "punctuated_word": "Testing"}, {"word": "123", "start": 1.76, "end": 1.8399999, "confidence": 0.73616695, "punctuated_word": "123."}, {"word": "testing", "start": 1.8399999, "end": 2.34, "confidence": 0.99529773, "punctuated_word": "Testing"}, {"word": "123", "start": 2.8799999, "end": 3.3799999, "confidence": 0.9773819, "punctuated_word": "123."}]}]}, "metadata": {"model_info": {"name": "2-general-nova", "version": "2024-01-18.26916", "arch": "nova-3"}, "request_id": "0d2f1ddf-b9aa-40c9-a761-abcd8cf5734f", "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c"}, "type": "Results", "channel_index": [0, 1], "duration": 3.69, "start": 0.0, "is_final": true, "from_finalize": false, "speech_final": true}' OUTPUT1 = "Testing 123. Testing 123." # Create a list of tuples to store the key-value pairs diff --git a/tests/unit_test/test_unit_listen_rest_file.py b/tests/unit_test/test_unit_listen_rest_file.py index c65fb457..5529d8dc 100644 --- a/tests/unit_test/test_unit_listen_rest_file.py +++ b/tests/unit_test/test_unit_listen_rest_file.py @@ -14,7 +14,7 @@ from deepgram import DeepgramClient, PrerecordedOptions, FileSource from tests.utils import read_metadata_string, save_metadata_string -MODEL = "2-general-nova" +MODEL = "3-general-nova" # response constants FILE1 = "preamble-rest.wav" @@ -25,12 +25,12 @@ input_output = [ ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT]}, ), ( FILE1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": [FILE1_SMART_FORMAT], "results.summary.short": [ diff --git a/tests/unit_test/test_unit_listen_rest_url.py 
b/tests/unit_test/test_unit_listen_rest_url.py index 56568649..efd26582 100644 --- a/tests/unit_test/test_unit_listen_rest_url.py +++ b/tests/unit_test/test_unit_listen_rest_url.py @@ -14,7 +14,7 @@ from deepgram import DeepgramClient, PrerecordedOptions, PrerecordedResponse from tests.utils import read_metadata_string, save_metadata_string -MODEL = "2-general-nova" +MODEL = "3-general-nova" # response constants URL1 = { @@ -27,12 +27,12 @@ input_output = [ ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True), + PrerecordedOptions(model="nova-3", smart_format=True), {"results.channels.0.alternatives.0.transcript": [URL1_SMART_FORMAT1]}, ), ( URL1, - PrerecordedOptions(model="nova-2", smart_format=True, summarize="v2"), + PrerecordedOptions(model="nova-3", smart_format=True, summarize="v2"), { "results.channels.0.alternatives.0.transcript": [URL1_SMART_FORMAT1], "results.summary.short": [URL1_SUMMARIZE1], diff --git a/tests/unit_test/test_unit_listen_websocket.py b/tests/unit_test/test_unit_listen_websocket.py index 071f67dd..976a9b24 100644 --- a/tests/unit_test/test_unit_listen_websocket.py +++ b/tests/unit_test/test_unit_listen_websocket.py @@ -20,10 +20,10 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "3-general-nova" # response constants -INPUT1 = '{"channel": {"alternatives": [{"transcript": "Testing 123. 
Testing 123.", "confidence": 0.97866726, "words": [{"word": "testing", "start": 1.12, "end": 1.62, "confidence": 0.97866726, "punctuated_word": "Testing"}, {"word": "123", "start": 1.76, "end": 1.8399999, "confidence": 0.73616695, "punctuated_word": "123."}, {"word": "testing", "start": 1.8399999, "end": 2.34, "confidence": 0.99529773, "punctuated_word": "Testing"}, {"word": "123", "start": 2.8799999, "end": 3.3799999, "confidence": 0.9773819, "punctuated_word": "123."}]}]}, "metadata": {"model_info": {"name": "2-general-nova", "version": "2024-01-18.26916", "arch": "nova-2"}, "request_id": "0d2f1ddf-b9aa-40c9-a761-abcd8cf5734f", "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c"}, "type": "Results", "channel_index": [0, 1], "duration": 3.69, "start": 0.0, "is_final": true, "from_finalize": false, "speech_final": true}' +INPUT1 = '{"channel": {"alternatives": [{"transcript": "Testing 123. Testing 123.", "confidence": 0.97866726, "words": [{"word": "testing", "start": 1.12, "end": 1.62, "confidence": 0.97866726, "punctuated_word": "Testing"}, {"word": "123", "start": 1.76, "end": 1.8399999, "confidence": 0.73616695, "punctuated_word": "123."}, {"word": "testing", "start": 1.8399999, "end": 2.34, "confidence": 0.99529773, "punctuated_word": "Testing"}, {"word": "123", "start": 2.8799999, "end": 3.3799999, "confidence": 0.9773819, "punctuated_word": "123."}]}]}, "metadata": {"model_info": {"name": "2-general-nova", "version": "2024-01-18.26916", "arch": "nova-3"}, "request_id": "0d2f1ddf-b9aa-40c9-a761-abcd8cf5734f", "model_uuid": "c0d1a568-ce81-4fea-97e7-bd45cb1fdf3c"}, "type": "Results", "channel_index": [0, 1], "duration": 3.69, "start": 0.0, "is_final": true, "from_finalize": false, "speech_final": true}' OUTPUT1 = "Testing 123. Testing 123." 
# Create a list of tuples to store the key-value pairs From d868d9c0fe62a5b8024b0b58c8e1e5fca75bbfd3 Mon Sep 17 00:00:00 2001 From: John Vajda Date: Thu, 6 Feb 2025 18:28:33 -0700 Subject: [PATCH 2/3] fixes some unit tests --- .../test_daily_async_listen_rest_file.py | 4 ++-- .../test_daily_async_listen_rest_url.py | 2 +- .../test_daily_async_listen_websocket.py | 2 +- .../daily_test/test_daily_async_speak_rest.py | 2 +- .../daily_test/test_daily_listen_rest_file.py | 2 +- .../daily_test/test_daily_listen_rest_url.py | 6 +++--- .../daily_test/test_daily_listen_websocket.py | 2 +- tests/daily_test/test_daily_speak_rest.py | 2 +- ...a01219f4780ca70930b0a370ed2163a-error.json | 1 + ...1219f4780ca70930b0a370ed2163a-options.json | 1 + ...219f4780ca70930b0a370ed2163a-response.json | 1 + ...1cb5071a01219f4780ca70930b0a370ed2163a.cmd | 1 + ...7f3fe1052ff1c7b090f7eaf8ede5b76-error.json | 1 + ...3fe1052ff1c7b090f7eaf8ede5b76-options.json | 1 + ...fe1052ff1c7b090f7eaf8ede5b76-response.json | 1 + ...7c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd | 1 + ...a01219f4780ca70930b0a370ed2163a-error.json | 1 + ...1219f4780ca70930b0a370ed2163a-options.json | 1 + ...219f4780ca70930b0a370ed2163a-response.json | 1 + ...1cb5071a01219f4780ca70930b0a370ed2163a.cmd | 1 + ...7f3fe1052ff1c7b090f7eaf8ede5b76-error.json | 1 + ...3fe1052ff1c7b090f7eaf8ede5b76-options.json | 1 + ...fe1052ff1c7b090f7eaf8ede5b76-response.json | 1 + ...7c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd | 1 + ...218311e79efc92ecc82bce3e574c366-error.json | 2 +- ...311e79efc92ecc82bce3e574c366-response.json | 2 +- ...73d3edf41be62eb5dc45199af2ef-response.json | 2 +- ...48abe7519373d3edf41be62eb5dc45199af2ef.wav | Bin 40724 -> 40724 bytes .../test_unit_async_listen_rest_url.py | 2 +- .../test_unit_async_listen_websocket.py | 2 +- .../test_unit_async_read_rest_file.py | 2 +- tests/unit_test/test_unit_listen_rest_file.py | 2 +- tests/unit_test/test_unit_listen_rest_url.py | 2 +- tests/unit_test/test_unit_listen_websocket.py | 2 +- 
tests/unit_test/test_unit_read_rest_file.py | 2 +- 35 files changed, 37 insertions(+), 21 deletions(-) create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json create mode 100644 tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd create mode 100644 tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json create mode 100644 
tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json create mode 100644 tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json create mode 100644 tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd create mode 100644 tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json create mode 100644 tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json create mode 100644 tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json create mode 100644 tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd diff --git a/tests/daily_test/test_daily_async_listen_rest_file.py b/tests/daily_test/test_daily_async_listen_rest_file.py index 07f88499..abca4966 100644 --- a/tests/daily_test/test_daily_async_listen_rest_file.py +++ b/tests/daily_test/test_daily_async_listen_rest_file.py @@ -12,7 +12,7 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "general-nova-3" # response constants FILE1 = "preamble-rest.wav" @@ -84,7 +84,7 @@ async def test_daily_async_listen_rest_file(filename, options, expected_output): save_metadata_string(file_options, options.to_json()) save_metadata_string(file_resp, response.to_json()) - # Check the response + # 
Original assertion for key, value in response.metadata.model_info.items(): assert ( value.name == MODEL diff --git a/tests/daily_test/test_daily_async_listen_rest_url.py b/tests/daily_test/test_daily_async_listen_rest_url.py index 54a3abf4..8fd8a8cc 100644 --- a/tests/daily_test/test_daily_async_listen_rest_url.py +++ b/tests/daily_test/test_daily_async_listen_rest_url.py @@ -12,7 +12,7 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "general-nova-3" # response constants URL1 = { diff --git a/tests/daily_test/test_daily_async_listen_websocket.py b/tests/daily_test/test_daily_async_listen_websocket.py index a05e86b2..f5a3385c 100644 --- a/tests/daily_test/test_daily_async_listen_websocket.py +++ b/tests/daily_test/test_daily_async_listen_websocket.py @@ -19,7 +19,7 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "general-nova-3" # response constants FILE1 = "testing-websocket.wav" diff --git a/tests/daily_test/test_daily_async_speak_rest.py b/tests/daily_test/test_daily_async_speak_rest.py index 234674b9..6003a352 100644 --- a/tests/daily_test/test_daily_async_speak_rest.py +++ b/tests/daily_test/test_daily_async_speak_rest.py @@ -13,7 +13,7 @@ from tests.utils import save_metadata_string TTS_MODEL = "aura-asteria-en" -STT_MODEL = "2-general-nova" +STT_MODEL = "general-nova-3" # response constants TEXT1 = "Hello, world." 
diff --git a/tests/daily_test/test_daily_listen_rest_file.py b/tests/daily_test/test_daily_listen_rest_file.py index f23a7282..bee4e22a 100644 --- a/tests/daily_test/test_daily_listen_rest_file.py +++ b/tests/daily_test/test_daily_listen_rest_file.py @@ -12,7 +12,7 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "general-nova-3" # response constants FILE1 = "preamble-rest.wav" diff --git a/tests/daily_test/test_daily_listen_rest_url.py b/tests/daily_test/test_daily_listen_rest_url.py index 3807591c..e44ea080 100644 --- a/tests/daily_test/test_daily_listen_rest_url.py +++ b/tests/daily_test/test_daily_listen_rest_url.py @@ -12,14 +12,14 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "general-nova-3" # response constants URL1 = { "url": "https://static.deepgram.com/examples/Bueller-Life-moves-pretty-fast.wav" } -URL1_SMART_FORMAT = "Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it." -URL1_SUMMARIZE = "Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it." +URL1_SMART_FORMAT = "Yep. I said it before, and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it." +URL1_SUMMARIZE = "Yep. I said it before, and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it." 
# Create a list of tuples to store the key-value pairs input_output = [ diff --git a/tests/daily_test/test_daily_listen_websocket.py b/tests/daily_test/test_daily_listen_websocket.py index 7a1fc5a3..3296fe20 100644 --- a/tests/daily_test/test_daily_listen_websocket.py +++ b/tests/daily_test/test_daily_listen_websocket.py @@ -19,7 +19,7 @@ from tests.utils import save_metadata_string -MODEL = "2-general-nova" +MODEL = "general-nova-3" # response constants FILE1 = "testing-websocket.wav" diff --git a/tests/daily_test/test_daily_speak_rest.py b/tests/daily_test/test_daily_speak_rest.py index 17d409f3..c7950868 100644 --- a/tests/daily_test/test_daily_speak_rest.py +++ b/tests/daily_test/test_daily_speak_rest.py @@ -13,7 +13,7 @@ from tests.utils import save_metadata_string TTS_MODEL = "aura-asteria-en" -STT_MODEL = "2-general-nova" +STT_MODEL = "general-nova-3" # response constants TEXT1 = "Hello, world." diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json new file mode 100644 index 00000000..163053b1 --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json @@ -0,0 +1 @@ +{"actual": "Yep. I said it before, and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", "expected": ["Yep. I said it before and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it."]} \ No newline at end of file diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json new file mode 100644 index 00000000..5c65fbf8 --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json @@ -0,0 +1 @@ +{"model": "nova-3", "smart_format": true, "summarize": "v2"} \ No newline at end of file diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json new file mode 100644 index 00000000..521dbe1a --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json @@ -0,0 +1 @@ +{"metadata": {"transaction_key": "deprecated", "request_id": "149729ee-8f94-4b01-a07b-c9739bc75e09", "sha256": "5324da68ede209a16ac69a38e8cd29cee4d754434a041166cda3a1f5e0b24566", "created": "2025-02-07T01:27:09.194Z", "duration": 17.566313, "channels": 1, "models": ["3b3aabe4-608a-46ac-9585-7960a25daf1a"], "model_info": {"3b3aabe4-608a-46ac-9585-7960a25daf1a": {"name": "general-nova-3", "version": "2024-12-20.0", "arch": "nova-3"}}, "summary_info": {"model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a", "input_tokens": 0, "output_tokens": 0}}, "results": 
{"channels": [{"alternatives": [{"transcript": "Yep. I said it before, and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", "confidence": 0.99902344, "words": [{"word": "yep", "start": 5.52, "end": 6.2400002, "confidence": 0.9238281, "punctuated_word": "Yep."}, {"word": "i", "start": 6.96, "end": 7.2799997, "confidence": 0.578125, "punctuated_word": "I"}, {"word": "said", "start": 7.2799997, "end": 7.52, "confidence": 0.90527344, "punctuated_word": "said"}, {"word": "it", "start": 7.52, "end": 7.68, "confidence": 0.9980469, "punctuated_word": "it"}, {"word": "before", "start": 7.68, "end": 8.08, "confidence": 0.89331055, "punctuated_word": "before,"}, {"word": "and", "start": 8.08, "end": 8.16, "confidence": 1.0, "punctuated_word": "and"}, {"word": "i'll", "start": 8.16, "end": 8.4, "confidence": 1.0, "punctuated_word": "I'll"}, {"word": "say", "start": 8.4, "end": 8.48, "confidence": 1.0, "punctuated_word": "say"}, {"word": "it", "start": 8.48, "end": 8.639999, "confidence": 1.0, "punctuated_word": "it"}, {"word": "again", "start": 8.639999, "end": 9.28, "confidence": 0.953125, "punctuated_word": "again."}, {"word": "life", "start": 10.071313, "end": 10.311313, "confidence": 0.99902344, "punctuated_word": "Life"}, {"word": "moves", "start": 10.311313, "end": 10.631312, "confidence": 1.0, "punctuated_word": "moves"}, {"word": "pretty", "start": 10.631312, "end": 11.031313, "confidence": 1.0, "punctuated_word": "pretty"}, {"word": "fast", "start": 11.031313, "end": 11.671312, "confidence": 0.99902344, "punctuated_word": "fast."}, {"word": "you", "start": 12.071312, "end": 12.311313, "confidence": 0.92089844, "punctuated_word": "You"}, {"word": "don't", "start": 12.311313, "end": 12.551312, "confidence": 1.0, "punctuated_word": "don't"}, {"word": "stop", "start": 12.551312, "end": 12.791312, "confidence": 1.0, "punctuated_word": "stop"}, {"word": "and", "start": 12.791312, "end": 12.951312, "confidence": 
0.99902344, "punctuated_word": "and"}, {"word": "look", "start": 12.951312, "end": 13.111313, "confidence": 1.0, "punctuated_word": "look"}, {"word": "around", "start": 13.111313, "end": 13.351313, "confidence": 1.0, "punctuated_word": "around"}, {"word": "once", "start": 13.351313, "end": 13.671312, "confidence": 0.99902344, "punctuated_word": "once"}, {"word": "in", "start": 13.671312, "end": 13.831312, "confidence": 0.9970703, "punctuated_word": "in"}, {"word": "a", "start": 13.831312, "end": 13.911312, "confidence": 0.98535156, "punctuated_word": "a"}, {"word": "while", "start": 13.911312, "end": 14.391312, "confidence": 0.93530273, "punctuated_word": "while,"}, {"word": "you", "start": 14.711312, "end": 14.871312, "confidence": 0.99902344, "punctuated_word": "you"}, {"word": "could", "start": 14.871312, "end": 15.031313, "confidence": 1.0, "punctuated_word": "could"}, {"word": "miss", "start": 15.031313, "end": 15.271313, "confidence": 1.0, "punctuated_word": "miss"}, {"word": "it", "start": 15.271313, "end": 15.831312, "confidence": 0.99902344, "punctuated_word": "it."}], "paragraphs": {"transcript": "\nYep. I said it before, and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", "paragraphs": [{"sentences": [{"text": "Yep.", "start": 5.52, "end": 6.2400002}, {"text": "I said it before, and I'll say it again.", "start": 6.96, "end": 9.28}, {"text": "Life moves pretty fast.", "start": 10.071313, "end": 11.671312}, {"text": "You don't stop and look around once in a while, you could miss it.", "start": 12.071312, "end": 15.831312}], "start": 5.52, "end": 15.831312, "num_words": 28}]}}]}], "summary": {"result": "success", "short": "Yep. I said it before, and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it."}}} \ No newline at end of file diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd new file mode 100644 index 00000000..fcf4600d --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd @@ -0,0 +1 @@ +{"url": "https://static.deepgram.com/examples/Bueller-Life-moves-pretty-fast.wav"} \ No newline at end of file diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json new file mode 100644 index 00000000..ba21e9d0 --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json @@ -0,0 +1 @@ +{"actual": "Speaker 1 discusses the goal of establishing a more perfect union, justice, and the common defense for the United States of America, in order to secure the blessings of liberty and establish the constitution for the country.", "expected": ["*"]} \ No newline at end of file diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json 
b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json new file mode 100644 index 00000000..5c65fbf8 --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json @@ -0,0 +1 @@ +{"model": "nova-3", "smart_format": true, "summarize": "v2"} \ No newline at end of file diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json new file mode 100644 index 00000000..7992435c --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json @@ -0,0 +1 @@ +{"metadata": {"transaction_key": "deprecated", "request_id": "826d0ce1-4dae-4f92-98b1-883b2bc909cd", "sha256": "95dc40091b6a8456a1554ddfc4f163768217afd66bee70a10c74bb52805cd0d9", "created": "2025-02-07T01:27:07.508Z", "duration": 19.097937, "channels": 1, "models": ["3b3aabe4-608a-46ac-9585-7960a25daf1a"], "model_info": {"3b3aabe4-608a-46ac-9585-7960a25daf1a": {"name": "general-nova-3", "version": "2024-12-20.0", "arch": "nova-3"}}, "summary_info": {"model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a", "input_tokens": 63, "output_tokens": 43}}, "results": {"channels": [{"alternatives": [{"transcript": "We, the people of The United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our 
posterity to ordain and establish this constitution for The United States Of America.", "confidence": 0.9980469, "words": [{"word": "we", "start": 0.32, "end": 0.79999995, "confidence": 0.85961914, "punctuated_word": "We,"}, {"word": "the", "start": 0.79999995, "end": 0.96, "confidence": 0.9980469, "punctuated_word": "the"}, {"word": "people", "start": 0.96, "end": 1.1999999, "confidence": 0.96777344, "punctuated_word": "people"}, {"word": "of", "start": 1.1999999, "end": 1.4399999, "confidence": 0.9223633, "punctuated_word": "of"}, {"word": "the", "start": 1.4399999, "end": 1.5999999, "confidence": 0.9970703, "punctuated_word": "The"}, {"word": "united", "start": 1.5999999, "end": 1.92, "confidence": 0.9970703, "punctuated_word": "United"}, {"word": "states", "start": 1.92, "end": 2.56, "confidence": 0.9890137, "punctuated_word": "States,"}, {"word": "in", "start": 2.56, "end": 2.72, "confidence": 0.9980469, "punctuated_word": "in"}, {"word": "order", "start": 2.72, "end": 2.96, "confidence": 1.0, "punctuated_word": "order"}, {"word": "to", "start": 2.96, "end": 3.12, "confidence": 0.99609375, "punctuated_word": "to"}, {"word": "form", "start": 3.12, "end": 3.28, "confidence": 1.0, "punctuated_word": "form"}, {"word": "a", "start": 3.28, "end": 3.4399998, "confidence": 1.0, "punctuated_word": "a"}, {"word": "more", "start": 3.4399998, "end": 3.6799998, "confidence": 1.0, "punctuated_word": "more"}, {"word": "perfect", "start": 3.6799998, "end": 3.9199998, "confidence": 1.0, "punctuated_word": "perfect"}, {"word": "union", "start": 3.9199998, "end": 4.56, "confidence": 0.9655762, "punctuated_word": "union,"}, {"word": "establish", "start": 4.72, "end": 5.2, "confidence": 0.9770508, "punctuated_word": "establish"}, {"word": "justice", "start": 5.2, "end": 6.08, "confidence": 0.99658203, "punctuated_word": "justice,"}, {"word": "ensure", "start": 6.08, "end": 6.3999996, "confidence": 0.96875, "punctuated_word": "ensure"}, {"word": "domestic", "start": 6.3999996, 
"end": 6.8799996, "confidence": 0.98095703, "punctuated_word": "domestic"}, {"word": "tranquility", "start": 6.8799996, "end": 7.712875, "confidence": 0.9951172, "punctuated_word": "tranquility,"}, {"word": "provide", "start": 7.792875, "end": 8.352875, "confidence": 1.0, "punctuated_word": "provide"}, {"word": "for", "start": 8.352875, "end": 8.512875, "confidence": 1.0, "punctuated_word": "for"}, {"word": "the", "start": 8.512875, "end": 8.672874, "confidence": 0.99902344, "punctuated_word": "the"}, {"word": "common", "start": 8.672874, "end": 8.912875, "confidence": 0.99902344, "punctuated_word": "common"}, {"word": "defense", "start": 8.912875, "end": 9.6328745, "confidence": 0.98950195, "punctuated_word": "defense,"}, {"word": "promote", "start": 9.6328745, "end": 9.952875, "confidence": 0.9921875, "punctuated_word": "promote"}, {"word": "the", "start": 9.952875, "end": 10.192875, "confidence": 0.9951172, "punctuated_word": "the"}, {"word": "general", "start": 10.192875, "end": 10.512875, "confidence": 1.0, "punctuated_word": "general"}, {"word": "welfare", "start": 10.512875, "end": 11.152875, "confidence": 0.9719238, "punctuated_word": "welfare,"}, {"word": "and", "start": 11.152875, "end": 11.232875, "confidence": 1.0, "punctuated_word": "and"}, {"word": "secure", "start": 11.232875, "end": 11.552875, "confidence": 1.0, "punctuated_word": "secure"}, {"word": "the", "start": 11.552875, "end": 11.792875, "confidence": 1.0, "punctuated_word": "the"}, {"word": "blessings", "start": 11.792875, "end": 12.112875, "confidence": 0.9975586, "punctuated_word": "blessings"}, {"word": "of", "start": 12.112875, "end": 12.272875, "confidence": 1.0, "punctuated_word": "of"}, {"word": "liberty", "start": 12.272875, "end": 12.672874, "confidence": 0.9970703, "punctuated_word": "liberty"}, {"word": "to", "start": 12.672874, "end": 12.912874, "confidence": 0.9902344, "punctuated_word": "to"}, {"word": "ourselves", "start": 12.912874, "end": 13.312875, "confidence": 0.99902344, 
"punctuated_word": "ourselves"}, {"word": "and", "start": 13.312875, "end": 13.552875, "confidence": 0.8774414, "punctuated_word": "and"}, {"word": "our", "start": 13.552875, "end": 13.712875, "confidence": 0.9970703, "punctuated_word": "our"}, {"word": "posterity", "start": 13.712875, "end": 14.592875, "confidence": 0.99194336, "punctuated_word": "posterity"}, {"word": "to", "start": 14.592875, "end": 14.832874, "confidence": 0.6015625, "punctuated_word": "to"}, {"word": "ordain", "start": 14.832874, "end": 15.312875, "confidence": 0.99853516, "punctuated_word": "ordain"}, {"word": "and", "start": 15.312875, "end": 15.472875, "confidence": 0.99902344, "punctuated_word": "and"}, {"word": "establish", "start": 15.472875, "end": 15.952875, "confidence": 0.9980469, "punctuated_word": "establish"}, {"word": "this", "start": 15.952875, "end": 16.272875, "confidence": 0.99902344, "punctuated_word": "this"}, {"word": "constitution", "start": 16.272875, "end": 16.912874, "confidence": 0.9589844, "punctuated_word": "constitution"}, {"word": "for", "start": 16.912874, "end": 17.152874, "confidence": 0.9980469, "punctuated_word": "for"}, {"word": "the", "start": 17.152874, "end": 17.312874, "confidence": 0.9980469, "punctuated_word": "The"}, {"word": "united", "start": 17.312874, "end": 17.632875, "confidence": 0.9980469, "punctuated_word": "United"}, {"word": "states", "start": 17.632875, "end": 17.952875, "confidence": 1.0, "punctuated_word": "States"}, {"word": "of", "start": 17.952875, "end": 18.192875, "confidence": 1.0, "punctuated_word": "Of"}, {"word": "america", "start": 18.192875, "end": 18.912874, "confidence": 0.9975586, "punctuated_word": "America."}], "paragraphs": {"transcript": "\nWe, the people of The United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this 
constitution for The United States Of America.", "paragraphs": [{"sentences": [{"text": "We, the people of The United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for The United States Of America.", "start": 0.32, "end": 18.912874}], "start": 0.32, "end": 18.912874, "num_words": 52}]}}]}], "summary": {"result": "success", "short": "Speaker 1 discusses the goal of establishing a more perfect union, justice, and the common defense for the United States of America, in order to secure the blessings of liberty and establish the constitution for the country."}}} \ No newline at end of file diff --git a/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd new file mode 100644 index 00000000..ce49b2cd --- /dev/null +++ b/tests/response_data/listen/rest/a231370d439312b1a404bb6ad8de955e900ec8eae9a906329af8cc672e6ec7ba-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd @@ -0,0 +1 @@ +"preamble-rest.wav" \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json new file mode 100644 index 00000000..163053b1 --- /dev/null +++ 
b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-error.json @@ -0,0 +1 @@ +{"actual": "Yep. I said it before, and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", "expected": ["Yep. I said it before and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it."]} \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json new file mode 100644 index 00000000..2a102e94 --- /dev/null +++ b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-options.json @@ -0,0 +1 @@ +{"model": "nova-3", "smart_format": true} \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json new file mode 100644 index 00000000..808f4381 --- /dev/null +++ b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a-response.json @@ -0,0 +1 @@ +{"metadata": {"transaction_key": "deprecated", "request_id": "6d2ca680-40b0-4497-a673-e6c8bcfa2c77", "sha256": 
"5324da68ede209a16ac69a38e8cd29cee4d754434a041166cda3a1f5e0b24566", "created": "2025-02-07T01:27:08.851Z", "duration": 17.566313, "channels": 1, "models": ["3b3aabe4-608a-46ac-9585-7960a25daf1a"], "model_info": {"3b3aabe4-608a-46ac-9585-7960a25daf1a": {"name": "general-nova-3", "version": "2024-12-20.0", "arch": "nova-3"}}}, "results": {"channels": [{"alternatives": [{"transcript": "Yep. I said it before, and I'll say it again. Life moves pretty fast. You don't stop and look around once in a while, you could miss it.", "confidence": 0.99902344, "words": [{"word": "yep", "start": 5.52, "end": 6.2400002, "confidence": 0.9238281, "punctuated_word": "Yep."}, {"word": "i", "start": 6.96, "end": 7.2799997, "confidence": 0.578125, "punctuated_word": "I"}, {"word": "said", "start": 7.2799997, "end": 7.52, "confidence": 0.90527344, "punctuated_word": "said"}, {"word": "it", "start": 7.52, "end": 7.68, "confidence": 0.9980469, "punctuated_word": "it"}, {"word": "before", "start": 7.68, "end": 8.08, "confidence": 0.89331055, "punctuated_word": "before,"}, {"word": "and", "start": 8.08, "end": 8.16, "confidence": 1.0, "punctuated_word": "and"}, {"word": "i'll", "start": 8.16, "end": 8.4, "confidence": 1.0, "punctuated_word": "I'll"}, {"word": "say", "start": 8.4, "end": 8.48, "confidence": 1.0, "punctuated_word": "say"}, {"word": "it", "start": 8.48, "end": 8.639999, "confidence": 1.0, "punctuated_word": "it"}, {"word": "again", "start": 8.639999, "end": 9.28, "confidence": 0.953125, "punctuated_word": "again."}, {"word": "life", "start": 10.071313, "end": 10.311313, "confidence": 0.99902344, "punctuated_word": "Life"}, {"word": "moves", "start": 10.311313, "end": 10.631312, "confidence": 1.0, "punctuated_word": "moves"}, {"word": "pretty", "start": 10.631312, "end": 11.031313, "confidence": 1.0, "punctuated_word": "pretty"}, {"word": "fast", "start": 11.031313, "end": 11.671312, "confidence": 0.99902344, "punctuated_word": "fast."}, {"word": "you", "start": 12.071312, "end": 
12.311313, "confidence": 0.92089844, "punctuated_word": "You"}, {"word": "don't", "start": 12.311313, "end": 12.551312, "confidence": 1.0, "punctuated_word": "don't"}, {"word": "stop", "start": 12.551312, "end": 12.791312, "confidence": 1.0, "punctuated_word": "stop"}, {"word": "and", "start": 12.791312, "end": 12.951312, "confidence": 0.99902344, "punctuated_word": "and"}, {"word": "look", "start": 12.951312, "end": 13.111313, "confidence": 1.0, "punctuated_word": "look"}, {"word": "around", "start": 13.111313, "end": 13.351313, "confidence": 1.0, "punctuated_word": "around"}, {"word": "once", "start": 13.351313, "end": 13.671312, "confidence": 0.99902344, "punctuated_word": "once"}, {"word": "in", "start": 13.671312, "end": 13.831312, "confidence": 0.9970703, "punctuated_word": "in"}, {"word": "a", "start": 13.831312, "end": 13.911312, "confidence": 0.98535156, "punctuated_word": "a"}, {"word": "while", "start": 13.911312, "end": 14.391312, "confidence": 0.93530273, "punctuated_word": "while,"}, {"word": "you", "start": 14.711312, "end": 14.871312, "confidence": 0.99902344, "punctuated_word": "you"}, {"word": "could", "start": 14.871312, "end": 15.031313, "confidence": 1.0, "punctuated_word": "could"}, {"word": "miss", "start": 15.031313, "end": 15.271313, "confidence": 1.0, "punctuated_word": "miss"}, {"word": "it", "start": 15.271313, "end": 15.831312, "confidence": 0.99902344, "punctuated_word": "it."}], "paragraphs": {"transcript": "\nYep. I said it before, and I'll say it again. Life moves pretty fast. 
You don't stop and look around once in a while, you could miss it.", "paragraphs": [{"sentences": [{"text": "Yep.", "start": 5.52, "end": 6.2400002}, {"text": "I said it before, and I'll say it again.", "start": 6.96, "end": 9.28}, {"text": "Life moves pretty fast.", "start": 10.071313, "end": 11.671312}, {"text": "You don't stop and look around once in a while, you could miss it.", "start": 12.071312, "end": 15.831312}], "start": 5.52, "end": 15.831312, "num_words": 28}]}}]}]}} \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd new file mode 100644 index 00000000..fcf4600d --- /dev/null +++ b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-29e7c8100617f70da4ae9da1921cb5071a01219f4780ca70930b0a370ed2163a.cmd @@ -0,0 +1 @@ +{"url": "https://static.deepgram.com/examples/Bueller-Life-moves-pretty-fast.wav"} \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json new file mode 100644 index 00000000..941730b5 --- /dev/null +++ b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-error.json @@ -0,0 +1 @@ +{"actual": "We, the people of The United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, 
promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for The United States Of America.", "expected": ["We, the people of the United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for the United States of America."]} \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json new file mode 100644 index 00000000..2a102e94 --- /dev/null +++ b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-options.json @@ -0,0 +1 @@ +{"model": "nova-3", "smart_format": true} \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json new file mode 100644 index 00000000..90e8c572 --- /dev/null +++ b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76-response.json @@ -0,0 +1 @@ +{"metadata": {"transaction_key": "deprecated", "request_id": "68879627-a599-4a4c-86aa-c84bbc725679", "sha256": 
"95dc40091b6a8456a1554ddfc4f163768217afd66bee70a10c74bb52805cd0d9", "created": "2025-02-07T01:27:59.312Z", "duration": 19.097937, "channels": 1, "models": ["3b3aabe4-608a-46ac-9585-7960a25daf1a"], "model_info": {"3b3aabe4-608a-46ac-9585-7960a25daf1a": {"name": "general-nova-3", "version": "2024-12-20.0", "arch": "nova-3"}}}, "results": {"channels": [{"alternatives": [{"transcript": "We, the people of The United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for The United States Of America.", "confidence": 0.9980469, "words": [{"word": "we", "start": 0.32, "end": 0.79999995, "confidence": 0.85961914, "punctuated_word": "We,"}, {"word": "the", "start": 0.79999995, "end": 0.96, "confidence": 0.9980469, "punctuated_word": "the"}, {"word": "people", "start": 0.96, "end": 1.1999999, "confidence": 0.96777344, "punctuated_word": "people"}, {"word": "of", "start": 1.1999999, "end": 1.4399999, "confidence": 0.9223633, "punctuated_word": "of"}, {"word": "the", "start": 1.4399999, "end": 1.5999999, "confidence": 0.9970703, "punctuated_word": "The"}, {"word": "united", "start": 1.5999999, "end": 1.92, "confidence": 0.9970703, "punctuated_word": "United"}, {"word": "states", "start": 1.92, "end": 2.56, "confidence": 0.9890137, "punctuated_word": "States,"}, {"word": "in", "start": 2.56, "end": 2.72, "confidence": 0.9980469, "punctuated_word": "in"}, {"word": "order", "start": 2.72, "end": 2.96, "confidence": 1.0, "punctuated_word": "order"}, {"word": "to", "start": 2.96, "end": 3.12, "confidence": 0.99609375, "punctuated_word": "to"}, {"word": "form", "start": 3.12, "end": 3.28, "confidence": 1.0, "punctuated_word": "form"}, {"word": "a", "start": 3.28, "end": 3.4399998, "confidence": 1.0, "punctuated_word": "a"}, {"word": "more", "start": 3.4399998, "end": 
3.6799998, "confidence": 1.0, "punctuated_word": "more"}, {"word": "perfect", "start": 3.6799998, "end": 3.9199998, "confidence": 1.0, "punctuated_word": "perfect"}, {"word": "union", "start": 3.9199998, "end": 4.56, "confidence": 0.9655762, "punctuated_word": "union,"}, {"word": "establish", "start": 4.72, "end": 5.2, "confidence": 0.9770508, "punctuated_word": "establish"}, {"word": "justice", "start": 5.2, "end": 6.08, "confidence": 0.99658203, "punctuated_word": "justice,"}, {"word": "ensure", "start": 6.08, "end": 6.3999996, "confidence": 0.96875, "punctuated_word": "ensure"}, {"word": "domestic", "start": 6.3999996, "end": 6.8799996, "confidence": 0.98095703, "punctuated_word": "domestic"}, {"word": "tranquility", "start": 6.8799996, "end": 7.712875, "confidence": 0.9951172, "punctuated_word": "tranquility,"}, {"word": "provide", "start": 7.792875, "end": 8.352875, "confidence": 1.0, "punctuated_word": "provide"}, {"word": "for", "start": 8.352875, "end": 8.512875, "confidence": 1.0, "punctuated_word": "for"}, {"word": "the", "start": 8.512875, "end": 8.672874, "confidence": 0.99902344, "punctuated_word": "the"}, {"word": "common", "start": 8.672874, "end": 8.912875, "confidence": 0.99902344, "punctuated_word": "common"}, {"word": "defense", "start": 8.912875, "end": 9.6328745, "confidence": 0.98950195, "punctuated_word": "defense,"}, {"word": "promote", "start": 9.6328745, "end": 9.952875, "confidence": 0.9921875, "punctuated_word": "promote"}, {"word": "the", "start": 9.952875, "end": 10.192875, "confidence": 0.9951172, "punctuated_word": "the"}, {"word": "general", "start": 10.192875, "end": 10.512875, "confidence": 1.0, "punctuated_word": "general"}, {"word": "welfare", "start": 10.512875, "end": 11.152875, "confidence": 0.9719238, "punctuated_word": "welfare,"}, {"word": "and", "start": 11.152875, "end": 11.232875, "confidence": 1.0, "punctuated_word": "and"}, {"word": "secure", "start": 11.232875, "end": 11.552875, "confidence": 1.0, "punctuated_word": 
"secure"}, {"word": "the", "start": 11.552875, "end": 11.792875, "confidence": 1.0, "punctuated_word": "the"}, {"word": "blessings", "start": 11.792875, "end": 12.112875, "confidence": 0.9975586, "punctuated_word": "blessings"}, {"word": "of", "start": 12.112875, "end": 12.272875, "confidence": 1.0, "punctuated_word": "of"}, {"word": "liberty", "start": 12.272875, "end": 12.672874, "confidence": 0.9970703, "punctuated_word": "liberty"}, {"word": "to", "start": 12.672874, "end": 12.912874, "confidence": 0.9902344, "punctuated_word": "to"}, {"word": "ourselves", "start": 12.912874, "end": 13.312875, "confidence": 0.99902344, "punctuated_word": "ourselves"}, {"word": "and", "start": 13.312875, "end": 13.552875, "confidence": 0.8774414, "punctuated_word": "and"}, {"word": "our", "start": 13.552875, "end": 13.712875, "confidence": 0.9970703, "punctuated_word": "our"}, {"word": "posterity", "start": 13.712875, "end": 14.592875, "confidence": 0.99194336, "punctuated_word": "posterity"}, {"word": "to", "start": 14.592875, "end": 14.832874, "confidence": 0.6015625, "punctuated_word": "to"}, {"word": "ordain", "start": 14.832874, "end": 15.312875, "confidence": 0.99853516, "punctuated_word": "ordain"}, {"word": "and", "start": 15.312875, "end": 15.472875, "confidence": 0.99902344, "punctuated_word": "and"}, {"word": "establish", "start": 15.472875, "end": 15.952875, "confidence": 0.9980469, "punctuated_word": "establish"}, {"word": "this", "start": 15.952875, "end": 16.272875, "confidence": 0.99902344, "punctuated_word": "this"}, {"word": "constitution", "start": 16.272875, "end": 16.912874, "confidence": 0.9589844, "punctuated_word": "constitution"}, {"word": "for", "start": 16.912874, "end": 17.152874, "confidence": 0.9980469, "punctuated_word": "for"}, {"word": "the", "start": 17.152874, "end": 17.312874, "confidence": 0.9980469, "punctuated_word": "The"}, {"word": "united", "start": 17.312874, "end": 17.632875, "confidence": 0.9980469, "punctuated_word": "United"}, 
{"word": "states", "start": 17.632875, "end": 17.952875, "confidence": 1.0, "punctuated_word": "States"}, {"word": "of", "start": 17.952875, "end": 18.192875, "confidence": 1.0, "punctuated_word": "Of"}, {"word": "america", "start": 18.192875, "end": 18.912874, "confidence": 0.9975586, "punctuated_word": "America."}], "paragraphs": {"transcript": "\nWe, the people of The United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for The United States Of America.", "paragraphs": [{"sentences": [{"text": "We, the people of The United States, in order to form a more perfect union, establish justice, ensure domestic tranquility, provide for the common defense, promote the general welfare, and secure the blessings of liberty to ourselves and our posterity to ordain and establish this constitution for The United States Of America.", "start": 0.32, "end": 18.912874}], "start": 0.32, "end": 18.912874, "num_words": 52}]}}]}]}} \ No newline at end of file diff --git a/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd new file mode 100644 index 00000000..ce49b2cd --- /dev/null +++ b/tests/response_data/listen/rest/c4e1c0031174878d8f0e3dbd87916ee16d56f1c610ac525af5712ea37226a455-a17f4880c5b4cf124ac54d06d77c9f0ab7f3fe1052ff1c7b090f7eaf8ede5b76.cmd @@ -0,0 +1 @@ +"preamble-rest.wav" \ No newline at end of file diff --git a/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-error.json 
b/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-error.json index 4bdfc185..01189716 100644 --- a/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-error.json +++ b/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-error.json @@ -1 +1 @@ -{"actual": "The potential for voice-based interfaces in conversational AI applications is discussed, with a focus on voice-premises and wearable devices. The success of voice-first experiences and tools, including DeepgramQuad, is highlighted, along with the potential for high-throughput and fast text-to-speech conversion for AI agents. The speakers emphasize the benefits of voice quality, including natural speech flow, and the potential for AI agents to be more human than humans in the future. The company is excited about the potential for AI agents to be more human than humans in the future.", "expected": ["*"]} \ No newline at end of file +{"actual": "The potential for voice-based interfaces in conversational AI applications is discussed, with a focus on voice-premises and wearable devices. The success of voice-first experiences and tools, including DeepgramQuad, is highlighted, with a focus on improving customer outcomes and speed and efficiency for everyday exchanges. The speakers emphasize the benefits of voice quality, including natural speech flow, and the potential for AI agents to be more human than humans in speech recognition, including the use of natural voices and audio to create voice-like experiences. They also mention their involvement in machine learning and their plans to expand their waitlist for a speech-to-text model. 
They expect to release generally early next year, but if working on any real-time AI agent use cases, they can join their waitlist to jumpstart their development in production.", "expected": ["*"]} \ No newline at end of file diff --git a/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-response.json b/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-response.json index 86357ba7..216ab278 100644 --- a/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-response.json +++ b/tests/response_data/read/rest/3917a1c81c08e360c0d4bba0ff9ebd645e610e4149483e5f2888a2c5df388b37-23e873efdfd4d680286fda14ff8f10864218311e79efc92ecc82bce3e574c366-response.json @@ -1 +1 @@ -{"metadata": {"request_id": "23139ba1-d680-41b2-b5a4-d977f0dcdcc6", "created": "2024-08-02T09:02:51.744Z", "language": "en", "summary_info": {"model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a", "input_tokens": 1855, "output_tokens": 113}}, "results": {"summary": {"text": "The potential for voice-based interfaces in conversational AI applications is discussed, with a focus on voice-premises and wearable devices. The success of voice-first experiences and tools, including DeepgramQuad, is highlighted, along with the potential for high-throughput and fast text-to-speech conversion for AI agents. The speakers emphasize the benefits of voice quality, including natural speech flow, and the potential for AI agents to be more human than humans in the future. 
The company is excited about the potential for AI agents to be more human than humans in the future."}}} \ No newline at end of file +{"metadata": {"request_id": "9d8c2225-3de6-450f-b48e-92e13557e87f", "created": "2025-02-07T01:27:26.032Z", "language": "en", "summary_info": {"model_uuid": "67875a7f-c9c4-48a0-aa55-5bdb8a91c34a", "input_tokens": 1855, "output_tokens": 160}}, "results": {"summary": {"text": "The potential for voice-based interfaces in conversational AI applications is discussed, with a focus on voice-premises and wearable devices. The success of voice-first experiences and tools, including DeepgramQuad, is highlighted, with a focus on improving customer outcomes and speed and efficiency for everyday exchanges. The speakers emphasize the benefits of voice quality, including natural speech flow, and the potential for AI agents to be more human than humans in speech recognition, including the use of natural voices and audio to create voice-like experiences. They also mention their involvement in machine learning and their plans to expand their waitlist for a speech-to-text model. 
They expect to release generally early next year, but if working on any real-time AI agent use cases, they can join their waitlist to jumpstart their development in production."}}} \ No newline at end of file diff --git a/tests/response_data/speak/rest/18144fa7f4709bc9972c24d0addc8faa360dca933e7e0027b062e57b7c41f426-f8c3bf62a9aa3e6fc1619c250e48abe7519373d3edf41be62eb5dc45199af2ef-response.json b/tests/response_data/speak/rest/18144fa7f4709bc9972c24d0addc8faa360dca933e7e0027b062e57b7c41f426-f8c3bf62a9aa3e6fc1619c250e48abe7519373d3edf41be62eb5dc45199af2ef-response.json index 90a5e0c6..87a8e1c8 100644 --- a/tests/response_data/speak/rest/18144fa7f4709bc9972c24d0addc8faa360dca933e7e0027b062e57b7c41f426-f8c3bf62a9aa3e6fc1619c250e48abe7519373d3edf41be62eb5dc45199af2ef-response.json +++ b/tests/response_data/speak/rest/18144fa7f4709bc9972c24d0addc8faa360dca933e7e0027b062e57b7c41f426-f8c3bf62a9aa3e6fc1619c250e48abe7519373d3edf41be62eb5dc45199af2ef-response.json @@ -1 +1 @@ -{"content_type": "audio/wav", "request_id": "34d37910-7a31-4f3f-9f5c-79630215bce7", "model_uuid": "06d0e057-2626-4b5a-93c6-38c3686ab88c", "model_name": "aura-asteria-en", "characters": 13, "transfer_encoding": "chunked", "date": "Fri, 02 Aug 2024 09:02:51 GMT"} \ No newline at end of file +{"content_type": "audio/wav", "request_id": "2f4af8c9-d7d6-48ca-a8b7-81df5fa81eb8", "model_uuid": "ecb76e9d-f2db-4127-8060-79b05590d22f", "model_name": "aura-asteria-en", "characters": 13, "transfer_encoding": "chunked", "date": "Fri, 07 Feb 2025 01:27:26 GMT"} \ No newline at end of file diff --git a/tests/response_data/speak/rest/18144fa7f4709bc9972c24d0addc8faa360dca933e7e0027b062e57b7c41f426-f8c3bf62a9aa3e6fc1619c250e48abe7519373d3edf41be62eb5dc45199af2ef.wav b/tests/response_data/speak/rest/18144fa7f4709bc9972c24d0addc8faa360dca933e7e0027b062e57b7c41f426-f8c3bf62a9aa3e6fc1619c250e48abe7519373d3edf41be62eb5dc45199af2ef.wav index d60645603530ce9023240c13da970364061a204c..552ebfb61a41c0a9ea179f4c959cc9b45b47ef32 
100644 GIT binary patch literal 40724 zcmW(-1$Y}r)84xiGg!7IGsa;~!_3Ug%$zjTFf((~h8t{{85(Su*)ho$pN?t&{r*Rf z63f=!4-7J3g9su z4qL&4a01A}zJl&xHT=GOaMR8l~CEKj8|%)H6B1$HdVqhBUe#oD%X{EFcI0e$G63VZ~1g70W8i=hs@1Vgd) zSW9d)=nB6pb)X&OgJxJdb{C8WX~2ynF%f%+_M8Zs!n;ZeOhJDqu)jbCNCmIK7%T%D z2y(z0a39I^5gY~QKmr^DJeUKD+aFTLZ*#-B2OF#vx!+a#Oqi`9LcN}bjra0G0Meyc(<9|q6B7jO$42fx8lU>4G6Q*>2LY!LQ0 zxQEbn4;Dff!qEqi4d%jWXk9PBBecfm2oF=hS>OVfU{!>Q@z^8mJhlMah@Hc}U=BQl ziC_;}(@=OHUV*bqi9f+D;Nw?Y4aan(2<2kp!|a3pO4Ep#~U{EN8&> za4A%QRtQO#;5!%xXm}kq0_TukO<*^C4m*G;APVau6yzdI&w<_1JyxUZ>cdiSB76!z z!t<~(xC6R^r|>`c6<&p>kyLj<6TGEtR8}hqFavH@Dk=Sxt4dgL!LsNtIBWy=z+|up z>;|_%2$X=oL0`}Yz1|2)fujgb8_+wdA_)hf1L?;K4!{<$0o;i&J`P)keFOE87Aqpl z+YhEA{1l?y7Qwn;7$^g7{O{giIJ#;%n2asPzM$XsMVQzIwu74B8H@#cKxe=s8#|43 zw-H_W4QXX4+FN5}>j#kSEJM38g9cy%()$l&c|*We@C@j&Wb6&t3VI-%bU<261nrQe z#liQ=CFQA-h48i&8o_a7)vpl;>_?cZhZx~KlHOUQ{nrYB@kl=tK_|o?VPx-J;R2Zc z|9nyjoJZJ7gm00o_k&G9hAD7~@~0vw4p;@@P(k`1h4yPj7~g|9rx;;mK6nGe%2?%> z@(GJF+nX)B-=?bcFOi2n9pIW1xU)SQl&*HXmz-?FZ%1XMI4r z>i{|-86XBgRQeCHu0LQk*cRp^3mpy~z=cqPJ}?DK!KR?qZ$k1J2y{rwI6SZ9!0#{% z;cYXz+d{~|USKV<q$Vma)Z%S8G ztfV2G_eK^iKp*@ByMrub3&#=rHw7cmE<>;}`fCD`=yk9P;d2vo!(@0HZUqs9+lk0h zo*^l`fW?S?5|LiEz!eBv^U)eKkbvzFGiQU=pc+sR=3)_!=3*u+2Ge815Zfn!)1W+7 z7qQ@Bv^Vt0Na}UbzV5@GuoBFInMecqNHW8bS6Ku5A{(5GFtQK4MR#^1q>Mm6mBUE% z^GtXM#)Bjfhu%99X|Wz`2|K~o=>LXjEj+A9EW zYQU=suXy22g!!omH;Z9Jd4X8$0rD~t_$QLkQe^J~5mTjrub?h=2dKdncmsK@C!jxO zz%-x>?1yyK7u-U=(2rLBTDhzoQD|iie1)!h2CIWFXgx#WFW3b6$+^gKyMo$C595*j zmqNN>!3^YWTEgebSEWAMcUgE4d7=%-Qvh%iX?QoX`2SgHd*l`Kv3`j8Qm|#npB{l) z&=y(QTV;{5QkkSYR}LXx^%Tje3-Z0afeV|7ox+x2L4>Kph*|%GQ^7xgK#YD;X{iD6f?5a3_-PTA)Q7+XCN?&%)nf3D|d}H8b`AMF+o>dWu`Au4E{fVp8H2 zqcT+~jquSHp{y1d0FHrJY&8b4`nU;Sj3r_>5X(11`}Dy{$d-RYK0!c!aS1ddy{tmG zJ`8hUUq!3jQ5bj`$zdpBrY<0W{NigQ#ohoS`(2EfVkg4mUZllg2)l3LA4nHoI2vKE z2-!zRWUtMkA1+0jU4`sv55l1zJVaitBCsI#8ilY!Ay#;TJiZ34=QW&+e8gJ#8^Yd1 zsD_;KSy`sY@)fzUvJ7D_4WWEJV%M{XPj4Uz-9;SO1xe-&k~9yV zgNx|1x*;rnMc&JZxb1(@T4ls9cH}SmAg|$o0kn%|@G`uHJar24y!U_|tBs{13lzb3 
zAc7d|0w|5R{4BElRL~x+HH@O5k;rcDpnFyVFX2oimHxPC|AX%6}JXiw$1xr!f zFb}Lo`mKSy_C@63HAsIS;175TvFInI0zQHQS{RreR@X}edIwB zwm|-12sQ%CMtfR^^m76`jEzM)(qLDSXAB`PTZBBo6vVwvK{4`mNhs>b!mL;WWJ@{7 z-{v8$)BJz|(YkTkSt9gpBX^zOMRzP*QJ@CU4ceBLJI zH>FViD7%$$%3sPrB~|IJyj8{_zjzMW?+nDX6;P~J0(2M$3X!IVASqWsQGym5h}FXG zf!`1(HbM--Ph?MA@S(R(hkT*MrbM4(a4GlG6kfBc206#P98p*Y1aK{0>fmUZsbU zCvTVA%hhEdchImn|ExnQM%MFzj<+40kUMAHm~KDsfA&b)~p%}wo4X%1#^+QLuw**!$i1VxO8L( zS&Cw4ksZ!I5~fKH<(JB16sf&H9=sTkC=Td?O~7KY514ZqsrT$>aX*-+DQjNu+~{tcQZC~`W{0fi zS=%xPr(Z~}?~!7>mNxpTM0I&A|Av+#T=0~Cm`~?x?91^V3y8rP;qByXsv6rzcqH9Z z)Yy7Lt=XizXq;fFWNYl$=!DJ%F*{@CIOo|LS%;aH>H4eYgPGDXt}k7coEmx;s2gww zMhDIXPldaXLns~FlfN%w%2s#>{J?JE*~Dz(HDOc5sPc#d#89F+QJ(PP({LIaiVXle zkmWCwM@hU;hd;qqVkT0@NJE4UdIBlFnqH!?ThUCPFXW;`_P7{_ebQ~UZH?QL^fLWr z)}-ulrOM~{vWd*EDLvgQ9q&xf)l1+Np$B6jY%7V02{5EsBLcm=K5B~HqSxC40rX3oe(?D)yuiaR?&Px@6go1GZc#dPWK>- zLQ{hy1G#?5zc8>a*epDU%%keDllci^wycFbsD!s9CaRjM->bW6j%(g%G}_;^-!!c> zm(_);T%ri~V!wlXN~%0iY{qA?IrLfbQrHyo`#+R?@Y;$V6=WA~DAt5_M_;r3#THPn zxo#O8`&Z(Kv>KVeXEn_ZWq-}Cm31&JKcSxMu4SLD0RLP5#SGc=dXqtp3~;&aJ{_BkFOuAZiQZdhv?Vo9=Hv7dCdbeUsITw+Xf=XYBh z%QIt zskx`Us5_^hZuraay8$<3=;L&8+OKN8su24K7t2+}6#idk6g8dP89o#=1PXmyO0E{S zDjw>+<@J_)2`-LqVRMBcN-g5LzM*}0+=T>Ra(vpS^uIC>W@TpOr3aJmxF^OmwCW7s zRU2R(p)vh1TqzJPi5CA;{Iz&WNsh0xKPR{`+&WrJ8@Sc{b773MN_hg}36pw_hSToV z4>#5@=t(Z0<7)K#Xs)_!LEJymC9k}2>5~vzpK|YO=R0i8yyofE*<(VV)eAifaG_h-% zlCdK5Y({q4iKG}$_t+#yUo)+3iZ7Q&uzyCsg--=7fw#U;$%&GFzVH4>a9E^vbQraX zuFRI_n~SC8Ey_-K2!ye1#1}QA-Kxip!%fBJrPhzO7501fV|K0miM56$!L&@jM$?B# z0Pm!~_(RO6=#U5*VuEFZrGw9cxuKbnrO`XIz;+ToNlxg&AYN0Itj<&S(oEEp)vQrJ zQzffL5_|E77(`LlCv+}Xl)sg-N+)@SbVWQX9O2t?6_|U`9+9uXhyL>ZZh_0emLXfX zS7bkRL70xQnmNW|Yv-8r@oSPkr=7{rWM*XKr;bg&p3o`&oAaopw*DGXSGmd8X9~!U z;p|`qf4Jm!$ynbOKNsj3>K?8d36M4EzHBzXO4uu&mFg<(!4SNR>Yh49yHR(*>x{~I+#-(kcZLGz$gEh<5$5nTTLR`k4fEMtQ+*4wZZ=zEZrlMDcwHln%cmSJ9og{$dB^9>^k+)crMWw0hleU0+?I zZj*L1+VgqUW8xQX#lM4vu%|Lm9xZi65lKUCJ=2SdM&5)pp?^aYkXK_vzlRr)qgkDN 
z7c5e();%%(Z9g6R#j`D`R7yDIe9E!pGf7tx`o|B8$*>JGZq?MoGZi17#OR|}LbQK{ z?_0^)l7f<&zCV2{{6m8e!YuiYy3DlW@_4;eUonBdFbC01^{mPSj=g)hVk+&QX$WJ7S5Uo81m{HVBnNh9AW|H|N_ z@WkjuW-9NICMoC)N&Kg(rg3P8YbR;@Yu(zVnp^5`s!v21x8V$m($%nn(obG2jTW84 zJZ=UvhN={889f-S6>USFjI<|dDwE$L1(Xw5UG-M|bPM6^8oMoixyPM&F43R(IPsy! z60dhvv1gdG^qguTut*F$gtA7)2JiY8`GO_SO1veTd?26?b&LESZB4gfEAug81KEx8 zOUtlj_(h_bn%0caE!6ij=u8&NpH{6s({af0!;$9nIzHO#+te15F<<*yH4Zy0hxqAC z_2|TK|KJ|~BwstvrN2sw>6BH?KCLcunq?Y-?*O0fuet2(<*_dr> z6;mT_k9$GFiNxQMvXcfU&iAa3YZ5caHo|mKcShA6l$M5bfX*gMLM?-h0+0QN{CE7T z1395ekt$IGeTf;vbrrfxPGuU(-gL!SjKy23W~#?%aGEYdC=>+Wb*`jjhJq+gX4?bXFV|qM8bFX%D7b5Fvn(#);L?cl-Le2 z=`=T&zJ%fzP55JQWpG7sW^i+8MkF~pjrvNrV$X8bg(9&biqpF*26VE02CCr=h$7;P zYJ{edE~t!otB~9lIf9Q6phvO;mf;gGmN`vU8@R8v500sLMZtC9{p3H$Zr*X9gu_w~6jyq|S;Yr^agL#g<{#*0!%U#&*DV#+qVjVoEWb(n8fO zY`!v1T+0dcl4#G!>Co)p@xb)J@&FxBf~~{PA~TTX{Kk6tuflxktBk?QsLHShYlEM` zNj!n@5OG9JB7=B@H^=|M9GD2VDc{h!tT&&)6MQ+YH#>{z%S_=`iofJ1Fdb)Ab#-o& zV*Tv+9J4$2cw9#OviME$v*RYZw9ZbpTIQyP!CJfO7Puon6U-bjpwfA9<&^L0B!c5*qMoE}q3% z9iJ{0$R1b-CJ^T|s||H6LEBx&su*W%N$jV%7V$0PmbfN5zu8L6W@CF@4fRhfpfDoO z{bZKWGpS=yn#>{>MIJ;9(YBP2szEoV|EAwDow%<2asDmeM#vPmNo&!n?t*4Gp~5v~ zbt=PBV~I&``C&O=ePi8gO|#~i5l0wy>6&SZhymDLrIVD$?`0p*U8&4yDRMw0KkN-J zj*KE}MSn%NQa4ewbC~n-ABA;dXQ`gtTP5YwDl-+?J8U}Fn-jSu z!U1WE5)VdWPw;c9D%y4W%Eo#oi}|*>hvk(eV(DqgG@mwBGW@CYYIxO0{04ZW6iZgI z507&=o5IA>Rj6IjnbDh3JrzgAQXQx~#1uu$PPR1nl)KFXv7NMB-lg=1G&}}oVjr+4 z*aqw!s-8?$rb^WXj*H=&^Yb}^ZOe3Es|m3R2Y%v1)t_~}P1|hyV`jy&abM$AZZf`K ze3iJit_6QuJ+Fp$X z%qaF0TZ)TvO;MD6T-gaGqgZ5$=78>`!D$MaR+)><1?H*do2CoKo`!e2_S*Amo+yjY z0itqE-XpaagM1-3kTY|e*zb&uSx+CK9ZWl>29t*FagN!*?&XH?48KI^EH04#lOxK0 zl!xnqbi4xP&oPh$SICpZ>)c3&rROtCnPNJg9z^Mw7eWwlE-jk*gAAa5qc%_@=#8|N?nxCyJ5XzAj$TKf zql%&g^^lV2)@&V4&u>N1@II&}ZmREVKj|H&Ov_X2TU%ATY-?sKXAPTcm}(iC>C~G2 z#4Kzf+#zcuT#Oa`+&5NaW-_~&56lKep?A_-Xbtn4slqm7*RwMFiDP+I$Q2(-tL2sm z$t}TWP#ep_ZliidV^~}HDOtoPi1Bul$HFjDOoqs<35lK*uJ`^ zc*w-;37k+C{I(4nh`w`sTz(A ze-Cet+>DHms3XH7&7!a9v0OyRlqbQ?_$_r;{XElc6iLswXE|0nHaWi758A?({U)Q~ 
zAMGCXCSo^s5YCeqiC_5ZTwRXfmawy#y7YERpdQfOnF`Dz`ggi3^ACzyI|_Djzc@|W zC0kLgoddJ6GuTa3BYlE0&a+Sr`G#zi4hd(u>&$$rUgSc+>oWz8hu(ziMjA)HMl!fj zU?qw%##?sV8CR2pZ7Ge@e^0NT7L%$<-k(s>eaW@fz89U(+p7Cu4VAf~O6bMyrcF^8 zHiX-Sr-hq_)&v~>UcL+dCBaXj{o$pNkpQo(_Q&py&5Z5o zdgWYW&$iAt_0qT02>4WZR?>=h`5N48hNiAXJ4I_#A?gt|f~tt(p)T}OCY3wE30#o( zh{vUA2&)%CK4O^FM7C;}YNV>a>Q7=jz8ZS~vy`>cabX|&oHCR@5(u3Nd@0#qw6gG6 zao@mu6bCgYvuIp;rha1HVXy3Z68|F6k#5V*C{;dtNoLdZaw+c;?!=#u$+VR>_0pci zK1-u`72B9@83p0JzU|(V-Vw#EO1#Cf-jYJ6w}vl2m_>e~hO&=@0ys$>V`yf1VgECx zX54!BYmYXesmJIZ7WXWszrC;dKYgAiQN@8c`62g}u1j@}CXF_VQfO7r;mGe+%4Bn#}P}uUZc5AR6$V$Dfk#OHIr-? z0pY2^TmI2LhcDH?DS)B#OAa}n>dLMc9>}-BV`7WugszO?qoKd?i}91uYV4!Gts#kG zYzD|vrbt`)Jt!UrJK45&eib~ z65~=%r2m)wqx6C@_j6ij&&U{-d@R0V%nnO+!(Posthv;htrWc(eh|`z<_4Y=`wOJ} zQiYSe{}z`kp69(&JU76T6}fmxh9^{I4I``%oE_tUhfeS$!Nh$D7u`f$mMhOu&&nH0 zY2xu4u$}^><6InlIARUo4yi+F!B_q}zT>`~{(iyRVUf&7IgMvRq1+7Xt|B${^)}-( z;}9chxML`9Sg%XdOdvwQt8|e667TZ_*PJP&#OOM5T;xosTVRndzqo%<lX9X->^$ zHw*pb$FM%0p}wj8rdJuK8Rr-;8vfC@(QVh9S6#;+fyYX&bW!keE7&f`%Ev|H$-&{( z!OnpTzQ)Cg-n#|axmEJsm8^+;Wy%XRq&$#nbjQC-HKo7FjAnN#)wy&^=?>X2ePoK( zbJ5{6sq~%HWpM|XDkjp)!c^d)|CQh3FDjl>aPAkEf21&__^fYN;9KZZv^l>-aT6_c zoH@r)+f^wp5LeCp&GR7PnP+j_BxfDlYIC8fgK@Za2>woXh?Dv6Yyn+J9u6%H)(&9) z65k}hFW?JJN1mY%{evqbPsS2eMe5Poq545awfUO4iur``zP`GytTsk{7jFp;Av1pJMss=e1C4WsfW%kWV zFU92aEAuwzSw>!JYI4Q6FIJa9t65Gg$0{fzxeE~~kQMmTf8M{p zufi(c$8yyC$Vrs-Wqlt?>X)19$F;oKgQEH9NRlU_**PSvr&4(?QEDbD; zObrdy_4Twq)nE8v&{`=YUE!CqL+B(bK;8_811o(#@A$$E`Hg<<`%Zpw{Ir(z2v27G zydX9s(rwEVFJ%nMcIRNFO{MpgU09}f_STI4sW&{!oCfna-9yzuJQiOs1*sRImj25n zBTM!czbzc{tIZGB&*iyw3U(C#=C2eQ9gX2d&L!AAUE7MJT0`9V&`w7PqS0z-B z|KY4)^P8dRmT{4`F>x8bL!NyjSB)kkYeJ_3y?j%O_jnD(F}^c_Ng*L@rK)qgBp0?t z^-!}=x6824G{l^4o@&}>Tx=+-AE5oJdV(rtodFK7OJ<<~D^n>^5HSXim*~8O`Gvn8 z=eGDh^vkdBQEvls7OAYfD<0C!Po>i|QaC0k1IIDWE(^+;q2ga98n3G`hbc}x=H{ZR(-7J2b>o@x)^C077eNa=0=n1chhuF{2 z4k53vH_Aw_DBkK_SM*oW=i(g!Z{$1Ona`9O!x*BYrksAK@wRE2`L4N!d93M}VVvQH 
zzO$~1rV612Z{(TMTcIhpnM#W^54GL_)a?(osGqcj&X`52!dX72P8%a%9Re&f4fk~SsX^VD;TEfxX`$veykp}eBOG^&c)N(Q5; zy#=!!ux_%D zxLKqEw?mvQk5e_Z!$eEQ*(`slsimHm%FLOPm7TsIZGY;ogncpR&3m*hh~q>m@j^b! zc8@IgO)63q;)O%=&*c{XtpBrK?wI_o-l2gj(aXXkP*&YXM;njYo5ov`2Bxe@E=V}$ zIpdk&S?=ByH`n>x^39N;jU~py`O<3c6vc-R`M-LH7kw;DEt=@v?S1ZDS#mz`H8Ps{ zAxuzmK?#wl{Y{^4%Cd~K{%zG-Uzlr~All&;{b=nWRUtSicM#uj1DHH=NAOlj^`b6$ z*M5e+cKz7@{ejO{^FR8plQWnt+*l>nRN!8b_9d%(&f%Q&()CIgW%tS$lwOeDDrJ2f zX&tYfK~z*NQMJaJi*2dnftTL*g-Z*S{8zu|pOb!k{_*-}e%@;DU%^sLQ+WqARF$Ob zXvuLcN*J8dE@eQ{_QXet|0J$XT%K?@u9I`2b&T<-W($5*c_oCIe@QPwgtH(%|5|>n zLb;GFyj1k5cynMAd4>&$ePJzpqN<$sk$#Fvv`n%cw|%nCv@|leG~F>iF^tqTRqw$9 zazLKc&R&X-RtTp7jWLfm|1iBX?K5>W zJv8n$lW*VX-DEa#s17HG7{|V?~jmBEq{0(_sgHV z{ zpg8q`p_HL6id7FAj_aRj6IE5Pr3&Itc0u%JpmUL&SEHzGpm%svv=KQma)YgcS#`rr z)oj3Nbfw3CkMH9mo$ceY-TpX}tFG;|p*gBtUNmgcUM5_S;%hTwqm9D6|5wSY;+I8N z3m+8pEvQ*k-RF)pW&Y;(Ng1HFYLa23y?0Ep>rku^x7>Z%(?8*N&$jqYaij}4)>^6= zwy1~V&ERaYHyg6~D?G;9I0Fe@##5*XllKIHDFl795uU5%w`nB6a(R8LMH-@=Otry*@vwFh3 z*E-a`!_h28asIGctq$jdn03y#)>ekangUH(eS7`inx@1rWrJYkTA=z89{w5xfgdG( zi!T?ADtcJ#3C2Yyvk!!C(gb*msH>lA+2nW=V{?UEy0}hp{o{_uCdPJi&2!GR%{7I! zYgPB~mtcTQb7>S8>L2LpKj9lxl3jekTiQFQc%E-ez#oESY33(aS{x_egB|b{>Yv)9 zI-NdOw^tXho2FGXr1~*o#C^&y;V8F*svW71PQQ6{13pV&QSQniCaXW1GOQ|FE0hCX z?TCwM=NxSFTXoKH_9y0ThWqL=s_q(%&aENv4$4vCBFZ#ur`wT2cuRP2@TUKV|3ct+ z@M}0s(VS0QqU?bJ_^g_0_+bgz8rVkV;Tqv;;Ul3Pp}C<|!Lp$j1|qVxUajfX{@>gx+?o5uh?7~EKL>9@pf)4yP9po zJ>vSa?U+OKEL6{^!>|m&Iryr=E8)0MN#w-8rD^hP`IfXpY%4V4PjP>6-T0%NkFnBP z@=?eWIYU<$zr*qP4{SR&MV)9SVn)Y3a5qo*FCmoh-BTs*g7d9&VC)3f4eNE?YrHz> zg1y1BFtfaiZ_gg1#*vjHy+e6HYq)fHSFlH5M4)5vS9l=3mrs^%DzmVvsvf#g<_q?P zF?j4BvFWkZTnRCg98+x-Ej^6;wA+Ytu$)v&XvLppuQ0z+3nTTyGr~P1C6R2h7g?Qr zM|!C{^kU{E_fc?5Bju&aCnX!M1y;NgJ{EW4Yhb#ZD^?P#3M)9ADNhazEc2zHnBC1K zN*7Uomltl=Sj<=Lg|5@_&D}95;?u-;a_M6a#XABDWy04fGZ0ET zF{eeZnZm5zofhx_69N{aR!xrhW&eY8HQjyw-#hDwLc0B|GZSx1p}k z6{v6jJj(LESL#an+;rvw!*F}W(I_w2m7giBh7~k*jg>8TZI_+0>vG)V_^xsJs6sI? 
z&Kg6Z%+5#kFai(|KLMPwPW;JTVpniA_|d3Css|%c`e+|=7^y_oMnBP|*q3Y?KSf-p zoX7WRBL;^l-`v*P-ge*q)v?;q(EhB7jcq?_}jSOnCBlnOWNe`+> zgu`TbSHuy0NR?qeF-_SLb`2Ne_VJqpATAP*iMc`pVW4nb2nrX4k$el*L=R%V3-jf! za1orSJb-R>RpZ~5{kB-=s+i8PQ{(BlPp;dpP^>w2fx~UV^=nnrh{MEH?1JK!8uRCw zD7}NZ!PI9OGh^v9(dp4LXbpqNDwK(FuxHrXoSH8byJCLzUewid*i^^zyUpd?7PC2~ zSq35B$Ba@@8s5O+EK1H9U z$1|OfZZ7k)geJmxzA9f^STFXJ!Xm0NaXt96QhDIS>*5!&?sy~hANo?}2A1cR?)H3V z9oI?MBG)w6;@Ed_Q({{=cA9@PK<$6(*2Hz#Tb#yjp{kQ&xL>3&@+(3|rlWqtR?(VN zDlO6H*nRvJz7#)`$HYsr0=5%(RR5|Ibghkjuy^LXK&|ahuhxCI>>zA@RxRq z>Is-E5qy8tfxwZwBbCE!aB;9kXmj{}9kd>v1tL{H)~6pg)%uMoE|hPKu* z*3ycqJ_L@XE6qh6r=gRnI#ey{ca#r4gL>~)hJJ*ahF^t;My5yN$$h8~vN^+Z?WObZ z1^%0w)%?=+H_o)wuwQkwafTfcJ7Mo?8)@~Kji#geKQ!Bj$G|V|7k~0KxgJbCsy2B& z3`5hy?IUL+1lfR`M|O zUBD*tcljXyjc+M*7k=aGvo^YzypNE`g@;5BP*WHc+l|BISXEQwUfZ#l&+(;_64FX% z-OKKly*Yb9c5G(9l=tqfF=pE`Q+s_`l|vc8OZ4y2$>bZWQzMC*x>I^UAf2r$e+v!Msz&PCOw=}giw=J?ZwRAK| z`d?b7h9??gHI+9)FLoF8oqQ9i7+Dm)5-JK_37!b`j&zK2l#`jjSlMcvmLJ9E@KLrV zlSEHI^`gPt1yl>)PxDc6rkn%^V{eK2nmk=u;~rB7^F;G4(*olnbS}kol=>G@79*4;;!0HYb+BVm zmF84*KiQX@Pjcj?=%MI{==|ul=>6#UXdzjaY#6B>nGki-YUW>d1lx&QDUHQGsvql9 z%mZybWBR&{$xO=kv>lnL*$uOcGNrU8$vFuH@pW8Nt-bV=Cjot59VOHr=)O)eFN{)0E^+1h{ zEDc==hJ&j^FxWo$E7&-EBoY(dM;V!h>@MyrvPXw7O1vcPl&7K@$*syCN-UZc{0Au5 zJv@^rO)MbhsWzyWYN~3pQFl~WT~D2+V)3U~Uob)`C0*d#vhC@|(G_I1NHBzjRtCQZ zCI#9D?18F*VJM2}N!Fo8(!H2nY*YxtvZ~tJ8~S8Zto5*STl~a?mx+s#+oaA+i%tJK zEk1Qp^33VaQHiwupL z=tS1YcNXf1@zP%DhTH>9*;Gf9kr&Y1l?6@4Hpj=|y;0XkHT(u1Bm%0+YK{7`s+4LQ z(V3WlH^bE6yrPk-h=cj9EY9RoE{Y)uvN4K*f}u4bLuf+~4;>GmCV!)n=$`a(Mk8EN zc>IkzU)$Dj(j2hsV?W1F_2?6CCMBk9OX;3+H#sZ0SCThjy}M29-wvI1m66vjSD7)D zyn!1)sUpvU2LmPk>%L_r14|nCmiQ|M6T=nA->H|(f1Dq6uPjGBX%U!*rpHRq%lm3|omeCE6gpI zg~1VFHu5|=hyK9);{3vUiBZOb2G|;G2euIFiS7rtOlW>`HJTXQ zho){UXl`gbnuZ+>L|7MAQ>w`WCB#<3U|!E1Vmi{jsR_}RWa&ufaCSHxu1PLH{THpM z$MiC8lC%-5CvL0D>v|j8T6;MM#kPvS<(`pHH%Ux-m{dDyZ=yMIT7u3a#ddcB+fuU~ zo%WWi$6(p=8h$Hdqw0}k!&QT${o8yl^myY-2y6(p2oEG3)MWZ7+dw!WDavcWU>ESw 
zghtg|)mAk{HDC1y(HN(}pJ>Lk0h$CHqKuH2NHjv9Uuq>cMpGpY`G<5$IwzfxdP|?g zabjH|A5B`su^5U)>1Zz5j(kHpsdrRE+J~?^pI6C!!6AH(>Z9g~o-uW|@s9YIGp;dl z_u^N$Ke-Ft_ubRoxZ4z;>$>HF2g}kJbtp0?^fqwOpX2Y~ zkNR^1)j}7-N~C5qnO?%0g+bDAMTQ-*o%mH^tO{3ORkzd(((G3cS5+s5BLr>15W?Ue zas#Qa_*NJvc=^`+CvF{Ajw@!fxZ&JZ&dzt@C2j>*o(r)z*^BHhHW77i%wGT*HP5xhwa4XmiLQ?^pB&v$oZY~3 z+Vt2^Q`bU08eb3pl-xoKE{S;?y%70=Y%VJ}E2s<2LDgq%#7drtzNT$lS)s0^R{FsC z08LoqUx|?Fy863%rCO!#tNKE0BU%!R@igoZ%uy6+v=kDnh&_bK{9&#Pm&EnrW^>0- z56wHy$2H;C@t65`=&_Zr&f|PKf0`dJWQpCxrD8qQrO*iy*lZLT-d7FNG|}DEtBs{h zeas6j3#?U8Ctkcg-~QZD(OK+h?O1C6YU^!VV$HGiGkFZ>v~ARWd>5#u97X-^`9*XhWm!HLfN6dp%0-`;k?L_=mz>B`+{FDR*}yrEkF`36ZO?V(_Pb6Q%+M+ldd^} zdaTb8-S8ji`viI_X1PH03q|}zZXfDM?1;K#HZxC{A510Gg;N*p<}BjA6M`hHLKVBC zQaAa4{Fi)OPEh>HYcwI>0CoF?FgMXf)j*x3`K4K;ZKkWD&({w$q#0|NDw-AZ4$Cu3 zp{23)u2pOMX02`2TWXj#8WQw_w5!y;h*KaaFBP}(F775ef3%L@(kD{vPvStI+()Go$;%M=v zP)7K~AK?%2Z%}vEVSXw9l%FhA5I>7)Qn8pTE|KQQqfk#|Wr(5K!<}#}nv|}Hy7oe7 zhU`988E=Buz?D2StuyX7 z?9}(vUDZ59XWfd}ZP;4*F7*&0U(BU)i&%4SJ#+$YWzn~U|uHDX2S zU(}m6OWq+rkdu)=jX|@=B=i6VO)9QMb4q`rIlWP+uXru$n>YUd#A>!uAMxT*MMSfi z9Y7y!CjKXJTD3`iMDt4P)fMQk8ipA^7-LOkO#$O5<0nIgp|5_Du8-EHIi?yy)J1ug zGRT4-%707y#c#rI!afw|F5;3=|Ir!tGV5h~a3pt{|3j!O>Lo$Sm!B&a&^KME(9H7_ z>?@{6677PQ$FHMl+5u=9@f@16{Hl~zCdnb`lC(>jEH#kQrOHxY>4;P!rOU(QAM!-> ztpRP&gU~Zhxrk2r)6tCP3go+*qJGRN zh)tiUuBw-4f~dQ`z4nT>h3=IuMeo)Z>;Bap)6LVB)9u!t*X&e}RaGHa>=D=wn=7BC zJ>q)de<#J$Tr1ARIk-05HcsM}^W_kBx=91&I!ZwK8?Faiu@!hV;&);aF^rf+{DC}u z2=9O|!cL=k-wJTD@-!HKaY3?AJ_?>_zM;D=2v(VgURJoy?LeIB~qEtk4vQan>^&sBFHsF(pA*%N3 zsCup@Tl+xU1?>swIo%5#>T=Xw(pJ;1)Lc|QSDh!u;BnXoG;zOI-X`r5{}jIS15mGG z4BwJJz?+37LbCWk+$&v^?I#Kh`J6d;(mNPQC4+ZRabpX z?a&OLWc+n&^pN!_u%tfJD=oDtJkmrF245-T%kYIeczqH+^3fO z53Kan@*_DOKFiBMQD_W)2`Di*Lo6(s`)@Jd5Js)c+2Zq^7_T=nmuoJ9vcW#K5H|JLD-+ z0qj(V_z<3WF(Jr*NTCLi42JHbzEQ5*x#1PRX5T-g1WrO{GJDIneyZ-_rdrBqRx2rg@@yg-)Z zDd4f64Lp|e%6@SA%u_B@4u$jQJos%F#eU1x 
z`%IJ>#=5w_+*R<&tQL1l@8n{NrOEB9jy1&(;|iiK(FHQE+C%~&$ok@HB7K;)`MHRsGKLQma?Tw;y|$k+-KW_3c@qk%{@VRU4eJ^+3SIw&D))kC-NPmG;0LPzNXsE9DLFwM{+- zJeKp|@Ox_H1-_Zigm_p*h_Q~S_XX#dP_f?j9O%?DqD3)*+%JA z%mUWLNpQj!hgjl0ypH~IUAZLqyOY4%9hJVo`#J$nYbrR4Crk5T6>fxAenW~#HQ|-s zg!{w+9G0#?8dwXr+&=j0Xdn)h1k#8BejW)9WEBvjf|3e8 z)n@W+`JNmHj`^WFoG{?x-vDarnsDlW1ujoN$h(ThGGfV4 zi^t%}JK+1$u%fF&hEpAw21;N8ypyZ1p3i0xYddS0cHyPNl$>#Q~ zNK6Bo>oV|@`zVINza0gqKtnjw3LsYN1zDa~J_qlsmQ2XsV3i(~)=E=gjhB-&(jV~> zxb2&ZI=ERD3!NbAa0v#%0e>F|-t`o56YS}0(r`Hks1a9yc2g2O8G9ifC&AxTfY!%0 zVqY+ji^F+!6zhu-*ann9hpO*_nzpy8(UdCb)$g0SlrX z&=IV10lcsE5DS))Nbwd#hC9Sv;wkYV#8365oe~cBJtdEYyJ4H6lJcc;xC%wKBE{6F z)s@gQXg0POdxgnZ0-k}#;WTyz>wx`6d!UchrPK?+nN?D?1fu=k@LK!9`raY8g*8RO zYq%;MlD5MRy#(tBlS{!#GE=?+)r3CapCV($;Q!bHtPTMfB&m>h=#)_)J^ibg2G3Fp zB%n1w=P&{dV}slm2p2}#4RO#OX_+(^;=T3KUU;ro;RKR_y4D+Rrfu?Z`MUgG_CTcj z3rH|q;2m{`ooxfo#06;+RQe`LL!>TJd-&^G>4~I+eYXN)s`jxju`voAaY9(E~i5fRhK~8Awyul^n*+| z9r3A-s=7gLzX=E==io+r7V82Ov>9+N-Gpb+_SQVn|KV^I;Eh`j};Wof(wZp1%evoHnL8C|Qs2?PM0 zs+h8-qIs;Zyh3^j=j0KHT|e?0`7!)xel7nJyhKX`g}6k_lHN-jgV+hP!v^?=>J8Hg#5Ujsy^`wyZKgO7z3M8x%6+PCh(>)vor``#2V+637k(1|fk$x~ z|AOzwYvCs_9o7ZipuUDYS3QCV>ul_f{0q*_y5fAu;yUvR{t0&udSkCRnQO&g=bH)n z!b+$#N2F8o$XE%W^vs5g;D{;}nTtF{`2VLWVCvtac59su|39^@j$O8I9PM0OR#1yft*g~8r9u|LzMWiwC84&+}|MGHUsLmZNf0sJT`JPELCz{r~SgMV+VK4V9)}*hu^@ zUXPebY=X?L6`>>+;h!-KbD{=yRP_Nkd%a^xaz41n_wxU8?;&el%A_*yqbH-+qY9=E z^A|d-ZMYJAg5ZRk(4-5A}DzJy4fLf@j>(AH=OYIL}I9zsHO z^d_wSrEtmv-3r)>1UMaAOG@bhoK>?Ss~s(lfm3fWL|!MwmvB23k($FwSR`$e&Pp#~ zEfm1lcj+ag;Ges@f=OvIdbyHY zn0AW}*EiAk(Kpu@=vwHOYqw~|5o!1~G!KbWwNY%4--7R}HMg13K+orT*h@913dk$u zLo$)NL@f+2r=Ld3Fvr;jyhm6oRf>I8%vRyBgYIE}@TbHwO}ciG_KxMiS_0=9sg#x3Da za=(B`(-9)kQ~Xb$t@IN%3(o~v$PqgLIcJl204f;Zf$WO%|eVx@r5l%{HftXIEB zjZjOPtvRFpq6_PP8#WuArq1vcZ`xt}VR#3mmkjN2VhYw#ZB(6!^^`KeH~203lg7e5 zNHk;*&W3(tx4@0Suwehtzhq?~B6y-&uDUQ<+7-K?{1;h(*2bR@8QNT3Dg78j507@>Mu>A`BUDw<*SJB;>kb<* zlg|9WT*Xq=a?`9erJvcTjkS|8zSen zLL0sem%=)j#Avg~ZknKHhvUMpsDG)8lrlU%T!bzY856zB7`X;~UtyBCKw2v20?Vpr 
zOc!e}ZSIIcks>yt||#xi5ZD!64uAh zjeB4}W-%K5nrrBHDZiQ zfn+-hsIOaM%j6CcEmY)3u-lnc%t5Hhj^wUzJ@^IU=hy^fGJaFH+@!UAb@oVjk<=|^ zTxu|NZfd=h7D;R4uR1K2X8Lp33&km+By)hO6PWGkRPgxkyg%dr9R1VyuP*;)VV-w< zuz9#3YUUeCyA|V*yQr3UtNErKr+cTPb@z00bggv1v=y|4npc|98WZfzWkd&}Bhi^q z5l!()*jluO`k|_~}JvbU<4s!E^TuG95 z0iEEzY7uf3>4?lzO;VOptdx>sCBE&&Wb?q|kdQhBtw9Rz? z>5Aw}f*-@HRcj9uAF;RUi>f1vyK+=Sg?G@kx=qKZUF2iXsO=7QBX?3o>D!T6%y6!` zpp!iE6-9NZpk0DWMKhFDH&&NK&L|TVW99SUWg5jzVOm6Z>T9TU@Hu$!KKkzkPg5Hr zJK3&6E$G^f!29W=<_V6z@s*RNq#jQ{k$yH!m3k!UVtjk&b8EWkKW#Vcm2#ligP9Z_ z5qjBvgQ`or6_}+NHc*sywzf3!eaA94M5{kWIUoIF`MoNU&k)1+Ag3kjd1IFOjU<0yW zn2fAtD}jT)xqhEF873O-rb6RvV^QNYLs|VgZGZ@4%hVcGz1T-#5qmq*H{6Si2HOPd2ImEP zfanK{*!A{_>@uhfWd^t7)ZJ_?G8m9aPUeEesbsk||(aH2p zDu-MXObPtc7S?tj^i~eY!3kt$I62audBpkQ-gL#< zD%T=y(7PCeeN(gj+Z-u()|K`4OE$H z{j;E_zSVch|0B4R>K(~tO9??~Aao_)st*Id4b?2x)YMES2H{`8B?O0jEMICV2Kb}g z9JVIIMgGvg!zZXtWL#)Zz~@Wx)$z6U{|O!lUuORBP2_8;68I^dWckDZ zI_rMcm5gZ0se~oYXO^3~UuZR@L9E5B31@{0eVaVZ-JJ?A7Mv|y?cES)Ne07)=o98C z7ZWy1?I7>bD|;b(;0*euQR-^yzZ)8vikl^K$g<2j%Sv1F%$JOBb&H8{>btRS!b9dE zZ3gN{^8oLg>WhK#))2fDDiThQ&gaTVXBCH$JoGtUUUNcoO*2pP9r7|A9zX}H_o@O4 zeXO*!TFBr(v$vQ(QCHL&`50aa&cLyOHolkcp@mcd;&yxMg&Iflxmof)wO4c1Wcb2K3F?YU(T3mK55x%U2kh(_uGng(3#byy0Sw@SeNN<}$}4Md?;l&Cqjyb?>9XiTUUL7W_>unCsWV^K(4K>KBX8PR_WIl;$*;dJ@G{1wzZ{7HV^_g}<#Y z!FR@6$vfQJ&{yAoD}a$fiViQ1v}GQ!E%|doo|q%YDY6j?yQb}Fyl>fQ_d6fEw#Mt@ zr^JGis2x>@%X@2J2y@*N#z&hb^Hx3T`JiE0K- z!$0BGiOU40IRSU=S!@Y(xW^)=R9>K?KL)bgBq>dN&)YZ?6CbG@u1Qu4Hu48NBMLu( zM`~goQE=SzDp(NtDJs;BwOh@*<4PsZOna9(smQV-+kl_fFY{h%k;K1_N2V0bGNi3c zvfJo)bc9s3WtNj7x2OuC^uQBeM_)7FX`eNa9lA|ji(Fz`3KPJW zUtjq^m8CYIt0AkZjn^cqYVtLlW{Ktv5ySo1RFp;vlviQ}QarFAa@h*eSok{mA=ow0 z!?)R!>Aqj^D*s6S_JZf`w&2=o!?ljRMOj^GD;KvU=|Nhn%mtZT=83Fl88g!?DbwNw zTV+#c?Gbgw*l~VVbTf4gbUlsz#eEII%i7+X>fP$i@crw1>t7mdN!|<}VDoFXE!#wLOlTzBBjfz#e7wiyNp~+VEaIN)?H-&J*(TmqeIweNS~&hqSd>~L^IB%H zOjB0l%!%nIQx7Fwi0f+IWI&0|%52dLZlq}tS={i6-hX_O@1QU0o$jmUAK-5im>b*> znnyj4?B<3^U6hT`r$j?NW3t$8IQY0S@w?-f$M16Obe^_7Hz{;aQA~MP+{c!P9Hicd 
z?t=EFe&}mxE_sq#27HNakqV5PJ6l+U-?+REu3SEL{Yjg)tHv7yB=U)2^%X|Yv zv{XNj9TnnPS7bAlOFj)1gp^cv_-l9;D7^PaG@!i2L2=>Z+KXM}1&Ul`6{^IW5X&{? zwAHn_AzFa-iPT`kEI;07Cnd@ z62t=9fI7H6_%K*6^fGigv@oFHuqrm1KCH6C)|0_ceh&GLf1TE~HJ-QVhm=nEmV`E&{C5-$DziRP3d4 zhx!CInP{vfbmeu`bX~Ro5gevOX9Hp7k+ewo!2XI-v@&d^zLG!47F6kQLHK_xiQ06V za7k)ys24;EHT?U6<>=LXXXOPv*SOemE3taoxXipvO-6cJvy|$I*W%(F@6B=gW<($L z!`Lpt!7iYSl6;_LU|QfyATv~w3;=JBBZK5xawvR?Ig~G4ju|GH6ur^qTARsao9Xns zkc8*)vt7-c>usORe+|90Yq3Wvd+eleg)JT}26_5Os7YkNYa0;}qE^uUJYp4GHq;GD zLgy3}S!so$H}5w%+W`Ad_I#BeMUbD=Y$I=nVJ{w3#zO) zz%NN-VK_-KfW=Z4(^HW>E3mrDbdH` zIyGw8XRnYnG850)k+VH#dXetw|0Ku9PqV!=+}3ZxaFnww1aH*vbeZ%GG}#Du}Fko}$cKm8-( z3UEy3%0tDo{4VY>t7JPfx1yQRLy^{zgorg_kK_W6>rs?}IO`SYEXT@o6mL`}G=^Ou ze3~{|hqk5W8~z8~kKjNx_yoE=oEytF18#FC_6nQEb>Nn9^SEp5CZ>I~EA1z@1^vE= z+h1_Gz~LV6TR|oB3zbVW_pM_RR;2wXa;WIaoQg#ZnS)Z^x|TT@^HyCPu?O9$`U#4` zxuTgH7=>hv{5M!EXbg<>0oT*}z^nB2_3sX#!APhfeVEA>|5X;lYw5dMS~=bEd}4*< zHA!FM$2s>{s~X>HG66AUSG<-EiC=^)frkF_KU^G}!dO6M*ogiX?iKD4Zb>hTuu&Xt zRlOw2He~{^TK~nei4TOActeyU{$i;prRuJDD!mj&@S{QD7tb|?n&41gCHw>R_kH|9 zt^wOG+J@Fs<$|qzuiU|cw1P>6^?b|8FU-4GWn!DDwX0?tnYBGvFQYp^ zl~xn@)i>d+>O$9!HfOu@r^KLKsPL=)$Cay&6QCrw5YzE}*mdEr{yp61B6GyH|XCT;wy5s7?K`KT?(50<2-qVBMRTR^Zf0@UhanSou+|hlj}_? 
zmASJ>QudR~bE&5im&9$hDvgx3B~bz034NR*$|8x5^4<8j;F{6UC{b4er4K-ti4$;vPxxcNI9Qii7RiNXjrc8OWZ?iK*wc?%B)x- z?-PzO(`b_X8Tbyl`0c{^1vd)1xTksYy)6PlNDf=s6seJ_KE6x;$5P6vNx+kdlv2ql ziTQCyY}qEa_9pgR^(J;(Dl1+Pt_xd*G@%|}z}9B=MO?rQwvt()JHfu82r`17}0~+fv>|#peK+dRg%Ia9}$lU&-sg>%&!6}g|VV6mXNAQg`oIq z#Vg^gsYU-Giw4K|YI=SY?krs9{_2}e)nX^a{=@gdefu>@n=vfwKoPvi-1L3PrxH?~ zO)Oc)mcYOpg&joOsY|KWDcZuWY%GIs_1)vQdh3-TCC~GP90pD+@ zm@bwC6}$nul8dFG(A(9E=lSBG&wm0QmZhNr|6gwn&&9$xh>o9nZv>Y`>Pi%%(xup_ z_$H}4Gu=fR7SU#U(zYjC;-5JFTDKUr`uU*IrO-<1d*C5a^Gl<}sA>U^cch!kzm_*6 z?^fQk{8NS7J>z_3f=IXtQ(pY6+=AN;{j7E3xP)fO!&2f?!im=)&)H^PtbauuMC}Nx z=pGvbDz#ps1v(&XbUEFQ$_S+aEw~QkJx)I#7z`}lRAw76J+3LvAmzaAyF_zWo1nX< z?WnDxnSy^r%c`5GHY>&h_wI`HT9TlqyC1X(L*aM(#9Uzyca}L8`5b;u&Im5_pZ4av zR}|(H78bg|>)tI=UL+8w?wGY%{D@S2)~6y{ij2)Fn{g^-ZsG~ob-TuL)%Z}6T))3+w?O(+|;*E-HLUO$PrpdPDSAjQ}`+CoykQl2k`p9)G9F!_9b7Vs4E3+s3; zdJFx{$oUbKa8Ws#_-)LyhvHWz-%MSSb|ke!a+QQ(&Kzq!<0h>W$JIrZb>xLYB6l=8 zh@MT24;>FI_rLS?_w@o-P*Hz@zgw_AIW2rF+MmyrC#ZH{U9}4hxOpw;t7}?bnNAz# z>iTLL;w{i6NC4E0xv_MpuD%jZ@ng8waIof7gLGcEOu|7qT?n=!!H&zQhKKA@CT#568%pp|YXPL03=$vi7#XsQ?$~9;_Am z4rf?8(?r-63nHb62f7vD1D7rJteY%D%o~Asnyu9mx6s|lG3CD4cF8SF;OlZd+0V>C zCKUZQx-vR7+BWKpK8n#Si?q@wj7q()F~4 zS;MpQi`>ebm%b_`H{q4@jxEpFNt=SzQdSb%usIQi93FK0tN7I3y55RF7vJOwxVyN= zx#xJM`Z@(w;r2{B@q=GDg~^_yzMH}C)VfGlHq56Fyi;F=aqcECq;} zR}JrUTY#D~0-b@(QLc&YmDUS|+#}Y^{$^Bc5w-@~ksZlSU^hdz(#tmDE^^Pgm(bH{ z&UFNG?VZT0@Gf#js8r}v=nTD>pRMp=pY%q{HG9c;UGjz0Zs{M>_oZd0Hcyt~;~aHO zr->=bJpNz=CS?Rp_|E_hvZ!aYyELrw(w?yAh_{0OT(B1Pfv&@(z&wuIVuRQ?)h(=o z{=Mahb6Y}ka*vb`DZ^7PCB2C+Pu#>kn z@Ys$~xsi>mUKHeS$|SS|@kcw^fS7<1YP@9}YBU?~7?g%7y7`*k_*?Z9)mFtJ`Goit zPPM-58Kx~mMterp(G2i=?}|=mrox1mfBEeKDSQx)gC1xmupH(@57Q&Vld0=u52}2G zO5ZV7)NUxvB_t^VU4 zPhsoAB=-r=KTu;tLo~SrJjP!l7&C-@!lSZEH5Kn<=wPkq{P_P}(V9s`lWQfnOG-|t z?ksK@t8=R{c?07mv-}6$TM8ECEAm_A-^;HGnZXlJnZQBnHSEm5{)o1U*y!-Z0eX}~qtCGK~gHpevSh)o*7b_WWQ*>CWyE=bD% zowq1ISTNXg*MErY867Q*jpeAGAXn69&_Vb`%`e?T!+2wLV}{{BU2|=3h*uvF!--S) z8mu|^L2oLn#;%A@`2VtlRV;w_(+xCb+SkUr=hdt-*88$6kU@aDbB0Ppyi2=y4$9) 
zwxF{{!mgx_$pe!t3B8?Ftf=vpW(wL`iAxRGa^V?)v!2R@CkoaVK6cmj?)F{tBf;OH zq2W!@#(bQ-T{%tt8GA|$)qd2KGPE`pGd(tyG>fJprt^jZU1cpxMDh1nH?%WySMgc; z!Ix$GL{~(HMS4bTk!$o0nxjeZELLKBgF8|OPT_{qeTZ}S3mU<~KV>U03!pD!q{@?Z zsauhy!X#iJXf=m)my8o_L*kz%lWG5Eq-UH?J(RfJdC0QV@Iq4yJretuJ4#Oro%Pjl z56-Wh*Dn7|;Tg|w?|9#6m{*|=mL~_%YuR;TpBSl7s3s#7uzA|A#%{2izr(V{#e z9f=K4wMDxVb#z|CO7m&!3)?SSW7|mUMN1pYNAr7AGh?Q{xaK!{MdgicllBVpI40Vj zR#HV_`p|@6TBtkujjB!$h^&l0V{*9dJSJ*Eb=XPHksC<|;2OD>KEV_hE=zyq3ZVLGs=TOfqZwl;Yi?xK*zY*r$8kW3`k8b! zX+=UiXNJYB?}w|AYVrp55jD)etg!OmpFiV&*njN#S@h4Tyqv;8-c!Mobc|~TKITnC zGkq~ry2WEvIT&Y4*F@LiIG6K~y_#*YWuhspH*39ED`ZUUsj!cI5{XguLw^5QpWo~B zjqpW$MBq&jp|a`f(N63f{<^3F$KDFnOXL`^QR39?kxnX&vQ^9|P2~H4D(1MbGNwl! zfU@{2(gCZZlg+)H!{ZmYwmZ68o0_&5J{dk5YU}Q!o8+p@O_&N249xYn@g4B0y>~om z0N9-GKOA~bS7vANL&ZLlC>@TKR}DosXs#O^=APze=5yx3R@qT0J~e@HEpt}4T`=9% zm(l9+O2`zs3VWJd=o1Q8<s^{eutoAky4T~)12gY6@Q9MWA()``F z)w$AjExu^N@A!~wZd{(jY(H-~Z7i#sfWJlhDtzL6u0m9x#9(8;*?Zhw(cRR2#eLS3 z>aztF00p8FbTl!kk0Jpni#Eh7YIefJtz7+FT~t$mo6)1nmB54D#0>|gkC~6>C2kYf zh8HCkIjm`8aG3(evW86XIF2+eF^@7AGsNSY6vqV@I7%jQ6Bu=PV6duxzHg8JS1_Au zNN4^dn5P%~2dQhQ!E-MHV<-Iip3VEtfPV>k->w>>~S&&I8) zB(Xd*G5j_(D`*I8_R@u=3+Ch(DY)yNa#>QdBh`Fvk=#02B;lq53)Q+zdikGYZ+WYwcxU?U?6$ z=4|RL;}~Z9VHOOhv_^aja$J!i9}hN0kg<9 z#(vpW!*;^{!SUHK!8yxrG4jM}bOvPmpb_R9&{@H@zNelK?*5+k9-BMQZSdv$>xYho zlbKBZlO!k?s*hrK^qLBmW@&EPRhssM?=qyZ!UaUTHBYKd1 z!S@i)N-JW^l&^tj-W^;i9}tuJu$lxv#wum)*kLhId>~bXxt>d)vo&1o9h;!m5o5J~ zb)9rAv~%?3%*!oKYlh{!ejafLdxfT`lT{^S-MF0a{LuQ~uRtJBBk+%Js<)iKNoY*C z49r(KB_$}{AlW$R?tl-`(0sydwqTaurk}=##uus&=uZvSAh%M5;k@W$o|oGoMtryCsIIs1yQRM4W!z9#p|gc! 
zqrE8*lA2kYnZnuu*ndzVvV+s?u5>&x9g|twrMMBu42=W77DvmlRp5i=JgP#Cq$5lmgz1OQxM27 zwI2I`chfE~W?QYch~=v3KYfmNo~E?sB6eMMUU~@Hwt6rjATe?*T$LJ6O5{ZPI1}PZ zffl!)L;<_<4AK~VicZFc;V7dYs z&*c)>>CyT0Z>k|xkvdE53mfQ~(3{1XQCtCKP@Q z|D^XtYj8%G!WdOvLMoyL_(_vthK!_tYDhLVG+K-rLn-|$9jo21A&B2-KO|A};5w=xQidxkR*MZ$9Y;%Rw(EzR##+8uui5I_x7c^t z&)Tk7>YEPfe`&x%i|kQ+7W;5(qv3D>`tli|@Old!$vVMpp%xU(CxkhZW?`fBEarhJ zm~&7MW&-v5FhWPnzz2X@P>p<2#LJmNWwu?UVmK*WiFVPZ@J?zR#LSG)Qc0<&;a2Sx z{V&sUdjT-1dd2sNTVtQb2r zv#ZWIWm$-$>NZ%+O4R6+MCIif=}>NNq)?Bynq*UQuOqPNYBGFx)mgI5LylDNj|s zQdh$GRWuEIhm)F3x-0szhV3x1q=%ubVVeGlZj5df^gOTNWwC$M^HjeT z_LyEu5=gdg6rqO$n%F-KdRWusjM0KUA0e`R_Q~{_+n7=8G|+0O2;_BX#XZq=97tX6l9dHDY9*XS$SNmU41{EK}f{VxJ} z!8;)zSwH+wNXvA&w2x?}p`Kv8IIykJzBP8gpX1mF+I+U1%=q6X$h z6sq=$67nzLN&!QGeoCzbj+iEx7Fgon>Msnu4Q;1Z(0`-Lxntsd#X)sFVvw$_@r60p z_S*3*Zj-B~D>H6}qpvOAG79wMI5tn21d5Hq2o_Eb4e<~6*7OA3`A~z@2ExG#)NT49 zlPENm?aD>SLNo_IOpMda(iDL+Mgtv??#Nli8R-~*nz>GYq1I7Ts33We7grLPdrj>T>l({1<8WSp^AmQBsHl+RreBt-yHER0o)0UrgDiy2g?Qziyp&AF&m? zuclRmvQ(_Q)KKuSf1(2-7Mi2Jlifpgp%T0$*e^7KJVLz-cLB0(CtHj@r)LH%t%p1NlC|t!8p_rCQbATR10hlR0{qMUI?XA z!|6&)m|H78lOHHfsD`SaqPMYsV2<5otSt&FLYWgg2`qdX6uVCP9?Y#s5#XNPW5@*adAJ?;AW%y ziM_g$#u1j%wuW{+Ff=PW=G$Giw`RhK=|17j)Dsjp#j@Pbh(N6fZ4K1-@Ad8R86l3z z2I|yFdIZxD^6o3KnyL)-M)WP#1MiKWf||<`q?Pg>kgG;QCon#;4`#Vc4+DuBo{5*O zBYjq!LoQ-I&2)pyYInYg>*RIVR>)fW6ChD*MHY4 zG*y5!xfwa2ycjzs?H1l~omeiq5-MAxfgCe0G$b@JbS&fx^(OmLDfE!2pB*F6(o)5I z#Dg{0Rx&I%b+VkWKDT|f4{*G;H?V0eS%&oBkfvbG?O6QXNGv6@{2Vp}h{xKzplWKuCcJ?czL6WM%?wP>S>c^+f;Z zE3T*9Td7qi;6F7x45)RIqki1yxLt9JoLUEAFK6v*Jgu3k?hs26-Z2|twIukfcor6# z3o&;)&q8le|J>k2ij4@IEOk=WRL9}(G=22fj9nqVYY1OP%VP5d(-C8H!zG!m)X@g?zAJ%w$6GOrUsuNo+HZGCw@J2#_xr^KCP!%VP}{PFt)J0 zdy=PxFE5ZpF%dmKSK6$wsVCwSwCxNZP3tY&t=()lY|CxcY&)zCEQ?I<4ZQA^<^X;I zJ&jCN4vh7cOhQ@i2TWYbr{{({Qq9O+p>?6tp|zpEp>q)V#?u|6d)Vu|Q@R(M2GjdK z5-)Xk3`b29EH|xBY_ot3YOqbUTrv$Ye1*riUj0nbNh;4bWB#QFQo}+I0=NBx{UiK^ z{=b2=&@pmtct>O$6Jy8oKB0=#103Jx*cv$ktcD!mg62|z)eqX;l=#U`7kS_u#5Y; 
z=Y?--@E-LddYBJOWtB73XYub^pJA%Gx%G(czI~MAs^h0)u4BEux$UKeHeE9G*Tslj zY!Fb2HcM^zS&TDsm6`%^>Iq+(cbI3r=Z|NmcNff!eHNSqDq08VBu2*etL~x(%}l+~ z^vGPA_Q-b3_P}beC`^C!1GNmk8-1aA8>;}5NRF`AqS?@oZ%VZ#ABH-GGD8(Y zy+hYS!^yT(xo|Lii#{H?8BJtIa0B_@keCb<_K0`n6y;@A6=X5e3;jSG*PnyudBXb8 zHr25$u3bC|^D>Y)t-Y6}n(3{6wnm3Ojh*73L@~M_IXAG@i@HY@U!da9%4&y>~vOe5eZ1r zPpG4ginqo6u@MkE61s)hO=W$_$sGoD_L5LPe=YAvx4-av;S6^b59cZAyYC+zTuj!b zpGR}JRRSqBQ+$ORc{5?qRW&4;u9&A=TLC$(le3lcw&T71oNbErrTHIYnr=I;QXf<7 zkcRSqql3f4LIzMo_k_+L;wk4z@DB0Sf={`ASdM&U9|;TPzRE4?QTP?j9bFH@LgPZy za5HJnGk1i}#TUa+eJR~SO?Mnc52#u|^wUi2&Ua^5Mm_ZW@FMCXIf1N3=7koAW`?eX zK7^d)baDo{m@FhysVpi!ygJexG=;Urk@8W+E_AnUqN$>_g2U`umCz#jX=-ZPp_JXA z)-B^&?6_j>ZEU33qwb*4h_l&Oky%v5U@w?s_TAG4xPc2i7d?Z#Gkk6Q?E)R)JWrt& zQ3sm~ovWuY19BftCH`s`82*}$So7>I=M87YxE66Aogv3uds%P@HP=tnBw-I!Gh+F| zDE3-p12{4#1SLss)Y7umqoB@5XxHl^dK`Q~ z9ZfY%myKzVW3AS)nqq_%`-L1*mW>?|_wg^-XmnPj1?_~~D1$r}suX$_yc1jk9K!p7 z2?5@p8?c1NP<81Yk=_i-*Obeu*J?iNk3*kfd|a`_=_xDHhNk7D4oIn-d^e%K>zf^c zDGz<|yU4Rxl4#`qM7~oUVD2#KPxdSQ%X~k)+q}EI|9Xe|Vty|$F4M>`l@Lj17Ylpk zDasz|w)i}4p5e3kleNIM*IvUh#Zku5!T#BL%RCNzn~Y`|o{c^RPi|CFi|t^~=Q1r} z*S`p(;cY-kT|6Re`mhk1cD)pm=K7egC$p1O;g)Qp3?vi2);E1f)m_q-?Tf=KkTiXCJaWB(xyzMu!Z3-RA>}AAbMhDsr*g!#pvgwPe=Vd^395sJxSqv!j6VUxbxY!s)@Pc zuj+Fb7Z;Wkgckhuc+aEP^RgaflewooSo>fEv+HX(KwBV{7 zD(YTzsBlza&%)Hg=ZfmEa`xGhmc9=D1%Y^sG`Ffj*6ZTx>3$_7lpV8Yh92XwG4y5T z-R*N{v8uB~Uq>zqx1QiyWWFoKgT1G?k3IVF8;|DZEq|DVdA$44jK?45A1%l!>h62R z3{_#yzg)4Q`RwReC*qfgtq~_!>C@lS#B(6x>+*HNT~Bo=S266DGMhp>gq(MM>_qBb zGRoXz9@Syr;F9mW^@|b;YZbm!7*}|>0PD2SU--QD)#558mrJsJ0e{_KFHEbtk}KQP zG+oCz>&$S~N6Tp$k{)uDwfeO~6GNMZUJL2L+S~m&K|P)K^bplhD%kdB18eBlV@ZAQ z-|j!-FYz}Iqy&!P#ibx~)G(vXdD9j%p_Z(W0vVyK+O89wADn1cZ`Ul>4%Y$KN>_sG z1gk%j^?J1rn_xHV(!WGJSmt}bB)vG#`?YtQx0g4k=uA;L?{V+x;$_TH+~WJfe>QMC zn1lqDD=XB;x|NgX9Cf|wp6p)W{>`mI^4&E;s&lPhMc|t2ier7xA}7>&OBW$WmBCHf zEfeJ_*@ec{8w=unn_#`jY|G6&T#MnR2^!7~?2CHlWmfgO?RZ?2v63e#=x_`4Wu31* zPPF4<#p+C6v7b7EuXUE!IEKMd61))1#CyxZ!02qo;e~BS#;L)(e1f^L*=E^l@;B_2 
z7AQ-2Dt8&K~>)PkKkiV~ajlr`nua-c9x>(;@=?5LZYCgV>^0wD zINdVU?c0c_EA1jaG5rcxL-1u9AgMDEy`JPb7v;TXeFCd?y19D#r$Kuo_BR z{D;;u2#t2TWOA(wq@|jNMfHq+fmg;W)IaE5I#su0Pwj=O4ZCU&M<*JMlaz&1GuiaU ze(TNiCQOY-rWzvHOuUF2)-AOp3D}~Ou|E_N)JtCBTG2D0B*UkT%|ILfW8oGf-nZmy4@(mLv(%%;uraN`p3gB%+~)Y)#*&_6!L|M-q$%|=8KTNUFd z&2Ggr%eHqgZQ@YQhH%uWvO&Jbb*sZZ;#<^3u6;cFO>EXD^-ubMUajBN&uhQhr3OBc zsN&=_lFDmHVZYETcJNL+&3^h{HqKNS`chrot z_j%88G7!^=x!-n@_2V!#Gv|UMg86~@SWA`gJX$eJVqV}1qW1mZO7o)q)lL+*TBc(3 zV(g)3opw$yr@PaFxyrY(Lx!X`_2KU=@1h zpHKd-a6sxyEP7Iygzyvh?N3aKEKJ%F>r)ilOwoNE^VJFz8?PZ5yscKm4C}$XFku$j#&0eL`;? zi!+&K*CEubrvGlF7oD2;al8{^ydGuyx=z4S2)FbO1vmTV7 z?c$YI9KkB}tvW(~+mG+LL`^~WjHRWHU>425BkoU*OORezf|F$dLh<+bhsE-=s*Oh4 z6p=YbmBpSoD;a#hgSd&<#k6j;y<$G+9Xntanc!nuq11A#PM%zhBGZ_2xNJLMQk7A| z)EDH#2yE&k{SrOhtM+mXjZ}`zBq~m!j zV*$1#HXHF=m#>X*>dK+H6|-xyhpbly)v*BI%FEZA_8iw~vt30dOtsVLKW`J~Q*bRe zATge`*R79OZ;j!X%=xF2m)4P2_mDw%AsKF=9Y5!|OP^O^XniVkdCZh4GE!b69-7fd zBj^vg|ChN++ak^)DB~!tHY=W3F>~<4=1_C?k`JqpcRsW`=plW{xMfvm+G8`3UqyF8 z>+Hgg^;OBK*=i%%JPO5Tiqw;TzsD6K89nB#l#XK?Zel^$dp03jipW~ij z8aeQejV4D;;*39+!>Bvgk&Q0#zpr76U6Yd>eI`3M-=LQlOQrtWVGWn$+ zeW;r3Lq{tj)>oqNM3_sML5G4{f{T%5j#GC_f>EYDqSpb`wzr7-bLc+nP`?MOwKzgO z^m4sKZ^8cDpnoKnch!zW#Y`ZhW!XjMGn}@r_B*r2 zrkjM|FdVrg^v*EOr!QBmtF)qp2cVKvHYxTXIeM9`j57Bda^e)d$L=(#62v;z zj600P>m2(upHoLUBkTgxK;qOGUSk=#_MjbU@&nztpLmIx&1-Nvwgu;#w{07Yyq;1^ zrQ1wN(=lqGY(kK&u3M;nx*ri+tYh_4w8?73ux?UW>S6Ov#T)KwM%g^7eulYVE|A$@ zCW4af1amdG#7;HQXkPy%=XSJX*pbD-UAk&&sL#|bI|R3DD&po@6|WcCCb+-pI?DF4 z-RUI`t(cBnx!i=Xmw6nS@omiYrRKUUmrF$DO`D=FnG@8!hVr_aBeQW;&oKsBj-nK! 
za%H*=u=n++(ohZ1IdZ}@05|q3UvLt2Qvb~Xgx zGLv<{}}4 z^5A}R)|N(jM@EvXOq9847f55{Hv5RkyTM#Lh*7!EY{MFhM7gTy9CQTBuZ6ya79b`OImp3u2dZ=D<#J|E$QeEih#kA_5tV+Kqb8urnl5f;) zvdE`$&t@@D}y%NP7RSxjFT>sN19Wg*4YSh|7}J(1=wi)f{k$# z8(}Gru$gwN*=v6%UZe0u%IXHlRBhQ?wm&1Ey{L>!WrEtqRkyaH+Nze4qwAAR`Xbr* zsJ7>VTTP1k*=7XBn+xhgb0s*NBdb7$WxoLG=49mCNMzYQJWr7mWUjyDXH`cA*-EOB z6E3sZIc$}xOYT0aev>}-pv*+WK8!}U=iLuomjBD~td7@>b zO~%z5OJ>e9M^R(1ku|!i!E%~)&}Y;Geh*q9lepR^X{W2QNOmIm4X3VTGb&t&_qa#$ zQCU;$QZi!>)oLqa%OiHR479_HLUZiEm|-bnrStfYr5agH^8Pc3fB%*YOvsmcrG~_4 zEA<7xv;2glddN2872l&qrcxWW%S~F@EhT(yAl0d(QyG6WXN0pD?`;JA{Sei60mt|KTc literal 40724 zcmW(-1&|a;)2;4tcU#BZIplD6?r>c=;BXFix5GKy-Cb{ScXtlg)$wVWp6;HO`s@3n zg9vtGcBZl_^X03|m(#gT%a$bpoYSL8_ZCC`nwVt;0AT33nGL|)D*)iY3jQ20aR5NC z84b2!z3{_$4&DYIjQs-Y!-|lH=fF2`4lD(~f<3S`+zCemKbQ)Rz-_P*s0xuYvWzAn+fQ?at zco62nvEV)U3hsgvpeCpa-=fzq13$qA@CmJArMy@s@h5q zt!Rw{u;RD}Z-9BhA81EC~jHKIqvC zcw}`A>;;$zR)S1W9(IHe(RtQ{N%TrHERyfZHJ||Jf*e4DE?6t<3YY+t7>jkm?_)Ih z6KKFnm<>s^7q5W{Y=R85fo1Ry_yl$ZW5G!nl-)267lL)bfZfGbVr#({xC{P-)xa2F zL%X#DKZs=`-u!>Mv#xrvO)O1Z9F zNq#2Hm##_&WC7-ay4Wd@nb9&-m-+a2V=X6SDw$YtPL*b+1u$iDNoGW|eIZ`XhgDh;9a5_W-u;TNa_JKz;~1$@E!<2`YJhd>=DN##)_jswHd8h*;B@^(FEs~)5taamp-4V1kC$hmb522? 
z{Ts}c*UMYc&Sju8X^%Wlf-B%s_y$e_%2fPFLh~lj*D22Vm9QY_q4era` zWT)(e!%-BigD>T-vQJ(ne}yBl;b?~zfRKDr_Mu4a1PZ|;kP7CZ)z^koz(uSxz6x)L ziwG~yA%E%wZX;_Oqu4YdlqcbDK!J6~-lG5Yfj8wp{6N|-?U6LnN3ol$VubSyVBAq zWYcH)8AXpZFpC26i26LstEUU&Grd_Te&MU@kliC!-VoCX3P$sf|2B z=A{?%4tNt`6AuO>Z<&SN!#MPG!ydppD5k$7Bs_+2y)Rq~A=;-jSQ4Fi52yfUY%_}O z1=tvD6v`MI5i+(w9$OJ*FExs7wR{t;?veCf%9D!A{n0!908`MqYT?tdu~-Znj9`3CaT<_zmGp0v>?dksV&R66KN8 z==J4L6gt6P_%|#AT4E6U2m2jtKv{e+inHFZ0L8;7`7_D{CEyU`<8x62ltzd|f%^C^ z{3})stB?Mz0X*`aIQ$ouffMB%$swH+--#!sL$V2OfQwMX&BCr>XR-NMPpllK!BVjI z2y5FQZ|aTwaG1OqW%Z>9wdbN8>V&YnC-OTzo`E021e86tB2TK0;=CPp5-W>cgkMlZ zX;Gf)0E?kd>n9Q7Rw+ZS2NNI%Ws&hHo26+f_RXk~rncj9^Rri4Q+SO7|b zNeKDsgAQOmNFu*Dj!nW&BE(mL7YNn-;D0CiHy8*fqim5X_mr#1EB^mXT1WwDG~yWs zEDvj}_^!YdJSJf~@b8N0%KeHqcsjNbevxdFMOrQ16)q>=@gR9L`BbPZPsG|NZ>yB* zd#Y^JP32DIX{A#+3aiy`~RU`|{_UUCrSqkEta_60rNu`Dn~9x6SR2Fn-a zV<@Ayf`j0A#NQ^vZQv}Hg*U_rl!br5JzzN&MSnp6H8e_XL|SwqOhQ}$VaZ^*4?52> zAc}p)AK)8Nb{URs!EPeoT7oab`(bNgYx$dWUAimHm1oGbxI@?`tP|fzWe{^~fug7{ z+TlHT1I1RwX~kcPT>K(d48-MyD2Mk&F+M}4Bud(g{5}=2y-6tN^#iTJ8^i!I;TE)3 zJIVqoa108FDLqD6X*fFR*~q>}coW4iyg&942F+74ixGjpBJ76)^0aIWlkT0{?K)fqXVlqZ!jR1!Dn*yfFSES=8 za=%N{(YtklpHWV{04;DBtOVWy39Lu&c?spCkyrtWOEbdQohXCWg#RF(F&VKFNxCex zl`2Tx5KeSMI8+%-fQJ!l%aey81~C?R!D7UoegzkR8k>pUV;$zk?tpZl2OGhCWThKC zgi2T>b&#$}9Z<%pCI2T4meeS69pC}DfUU$yytiV4Vz^?pVy2>`;yHd5-;VpSoT6JHc+OIX(+tgBY<7N8A!tle>!_lh3&(33J>>t%!lh z=kR8tL*gW76Y2p4*3Q+$eTd;tX?;nZ*3YEM`~+GC9m_W)<^{Q76(8Z4(%~hJDIfxm>Pl@{uq> zlI5`=fz4HXR7$F(x|OD}W}2E&>J_hnS$>;Ta{ZXE#MiJXurUA2&#HMn11-oYY$Nd$ z_C-6@dfWFhYglP#1!E;^h2^E&79W<`KCPW+f^~p)3FZ+_$Lo=Wu_w_Ekv1V)pkZKk zcyjbMxt8JCKFQr;8CXVtS&m_AJB{m*jikx&?*P0uBxW0 z45~)Tz6t_=hO)!Ia<0&vXA^R~8hw!H5@{IhT(mHc`O&Fhd}MiiNHQDNRoV?ToVxVE zB|K#v75}P~Q|@f(+!9r@x~3g)FE;m5?}utWJKl&I7b^%SgZKTD{0{^7LhGY|IuO5^ zFz{^zUap`xsV=5_tRHM_W`>p~w)Xa7j-8HDjs~`xmbRuM{aKAh*%~Ym6WlRoKlM5G zBf^L0gy)6-h*)FY$nUfzae}?f*Ath>6TlLLzV+~K_-}YKyfxktci?-mfmmsjiEWsO zcw!Au3f`3ZiQ|)NI0fs8M+kMaR`70N&B6zP48oWwl^ib}#x`hI+ApR|&MZ^nRH?P4 
z3QKG#o|e@({fMujy_Mdhu!_am-SlwcM5Icnv;S7%;iBe&CE;bUj$}{zH^#(%|bydn{Cier%3nrIJy$RTZf^sb{Opt6i$AiWq(jABTII2d^ zkx6dgt8#4;Tj_Dc>d4sO!lJ^$!@*5Ndu9yxGucx9RrSV_?t76@rg+biXG;~7np)~) z$?e6*W_0)dZ5^c<0nQ1DgpRhyS_d-=pXF`Kn_qC%-!Y_(?2fJ{_Rv?^uHsOTk3UuH zR(?~r)H}_3TQmC;dppMt$4^Hwr_))_;k2zYoz>k}x$%fRLMXu=q!vV*g$4$0`ycve z21W)ag+@f?6Mx1xa9zaua4+^#!Ky^fZJnquXE>?1=xghyYu{j&RR zlf{DML~aW+o0=HA5>5%;DEhld2rh~ls0Q&qiCxJ@ctc}>qrI%AnCX2Y})hceyRrwJz_-fB_=tF z3o&n~U@Sj;Gcdh~C>-yf7#bOULsX;BB|1rC)XmKy$6D{$v@@Cc#s4Z*xAdh_bxZ8b z;Jp{@7L!xE3?C}CO_+%p!QMsM!o>wk3)&V`DQHl*&F=_TApG>Q#CooO(kS)7nyB2` zn)Hc_W?*o@{r-+1LKsyhZ;^bql168ZI?nj2IOb{Vxi;7yeRo z+dnPXFgzeyjrax_4;F@hMn*;J5Z~f0#YxJ}`cCFL_OQ#EGBM+5u~_l7#iwR{ zPD}Ou<63QVns#e;VOxdF#2xZzv|o6Buyo)=(W=7AMNI>x!Z1n_S=1t$XU_1qrB$E> zUQyXe-A;GXxWn?+Hs5j6dBU~QUCT4ov)?__InqX&y6CT|%PX3~cgdBB@zl&{g)kN> z5xf)_5m*v95_}Ne9!sGGW;oYd5F`-{Q#R4m)UDBPF?2TO81EQQ8*dp#=~rmus%naj zK$c1f&sdUn6Qje&1DV0gp*G>Y;e+8Rks)L~?zlWhaayxiKgS$!-1c5h^JOm2{E=}u z{Znf3l-k~rPQqd}OjGRx+2WrZj04IVI}y$e?hmvNo(p{ozl}_d&X4UTH^=9&hxyim zLuv-g<3m(kwBPhf(?jzJYbkq?V~ES@-sbAy9B5x*y=Wd~?5Y7_NM&8m2j?-K*?<) z@2=u3ZtH4lt81c?v1YKp*nsyWn$u0l&O~`)I5Cb8Vsm2eV(rOcbW^5p0uf`iE+Q~topU$3wR-2OD(vOMn>sRG?cAr`-`scI-(7^G zuC1gwSHD@aTX_eYE_W2xaODyW;!1iLWv5P)P027(lk7>oq^0-?reUHMdx2Y)Tq+Kd zZ^MVb~dw% zE>6*el{gZ68S6uoBx_R3=tO)$qCeL>X%%y&eNc_1;x0vz;t%CQWi#al#T*>nF5ywp zm899NiD%50_;@;->PpR_J0*VO_XsuQQJ6;cL;KWN)f%=NUBlh|Jx4stJk>mY*DA*q z>v7X>`lgzl$~oAtas{Cs!lEj1JKc#|NuDQa6UB++go^UeJLn2=H#0P0<0$^F&|Z2k z-vGIIchy%-O?`Fa2h$Xb!B)fm(_X{zuYIkpigl!UveBh)t!b*PfK`#F3;Eo%#6NMG zIzzr7nh*_%hr}LoBXxlO8E=vJ${yi=7jngBsP48L%)%PtxA8)B*LLFuY#-b&RTg}F zg8hpUt#DMD2hJ(lX=)qpnU~vAoMqic&tp$BZ#VA=5A9m&xN5y> z`mRsaK2~!H2`RI4GIRMwkM+In|wf zM4TYL5M{~J zZZOGsIi?3&D>+?6l}^m4T&O8wSY_U7(>cGp>U*wvT6kA`Z+JSoYda^~mYFXa?rYbo zy5l&!EEI8*5)MePP$d64xu~wF-KzgZH)LeQ|98a8Kr}8Oi4Fjd5TpC-TxTxx& z*{7|i&oU-W!!7NsH?3c+i>yZL5=&)s19Wc7G#b@uyare#xda{Gh+WRqh&<3 zR1xK)m(f+?7=tB>v1Pe)+!#JH`80V_$QJL4DX1Q`NgN=~77WRw{1N^pzlZd 
z5BMQsb+`lU!4IkyX>sEwOSXNwGuwT~)5SZ{yVNUqD!D&8O50Rszh13PQ+>uJ%aeuM zT#dx@I78p1_K-h_Im9tyCb@vBLa(Ar#p^L65}(=4JP_1khO|ch3tYiPpG4mcifCOyJN;hjJUMlme(5_%^;a4on5H;C`WyZHCqDxO2^>;mRj zv{!%9elaw*jI=*+&T((^#66!pZ#=_2b=@h>YPK3?sDGxBlpfqKlR`zlT!Nw>Q@Lao z*`GKV>leEiJ4J}3moB84xG6D|?ZCGb3d9QX66nI7;`fyY)IT&j-B|rS!yV%$Q(f~k zb6>OCJjS%nc**cp-%9tpX0hs!;w`2C<>YK}Yw|HykNv^yh=cfO`V`fYYEI3eIBE~w zGv0<7otTSoY9GHnxmKty&P3G}NoXdNPUdlQxvShO?gm$lgKTyFlyFL(j@?yMR5#T1 zH?o#Y$8=X8&)?pJm-T-3uJZQrgq(K!5pxYg8*LfYRcx-DEmT6cp62v((o2pb7R6%G z!sznYTcQ~?njRUS!E8xbxy+3JSv2n%~e@=Cv-jUtO1Q8+@kt?XnbU3a}WUy1Y zNBr~TL!qO1SPY17k!@Rqc4#-QuxHq1+yL$w`-~mI?M;@ITcPaZP}k8dFxIrra$xQs zo>D&1r%LJPo8@ibk(^1}9`k?t6wM9A06>Ws_)7_azDaf^?#G^@=UOZlJ4yUTjf^)) zOkoFd{rKfcvv^s0D>p^`nc83kM&Zkqvx? z>6KI{ot4azRot0;&5dBw*+uL%bTb{!u4Iq%HKcs-UC~&5NBh~(&ywP3>dx^T_Ez;h z^%nPb_Iz=za~!hzOcizgRYlk(`K0iY^CouEd&#*(KjIrPjuc4;HJx(CWE4HI<_ z)Rz>`!F}mjaxwR3qE5UMb&EI^>lFJt=8uga(nufGll~gtlNiLs`8q;H5tri9bvX?- zfScrf(k1Z^;XB`o6B2I|``8!kYxZw0$(RTFznNM18+B2MQoQIs39G&fbt&Hh+Ll149G6VZV zdcfaHjE;Aw_7Wvy%IJxRDjJBEC4NI(;354tGl6Zue@wO!=Sd6Xl5hgNje0XLQ62W8 zyiyvCqVoYaj%~(v;5zd4`SN@*{;yKXNho)KRWuNt?;Z^!2)Zn1VwX3gAD?gKwZ z=!2>h^T1KOK)F$)F?=@Fu(q&&cgW7|t_iLq&Y_M*woT@KhOwH{iou|=^q$|8I7*iw z7sj$;rdTBUI$9=HmAFYBqsKA}5_Q-<++~!{nu`@soeE0Bi3##T~FgNORmG{)ueb+2Bv_NJ>IKsowKg(vgv}Z zo$4ysCr;%}?2q_oYAVXnbz*H|TZx&JF}^L{B3?Q^g{jOfN-Bi$$@azkx^8=Blr_ijlF|=u zA@hXFCmzO1#acyohUIXdXa}-*+#7#D*{Cvf*TffIDQryEN*)lt!J0}2Rd!mNLKdS# z;ok2Zk)llfl=9B2bZ@pVGH=rFRAcy4aTL3VDj(YxQAHMnkA*siYKLb>Uz07E^Q?;} z`9Y{Q)dN04mBZ2CF6v_}L;ay~;5+Icb%DoGMRJntmfK6+gluj+)16u$Ef=X0D^BK; zZxQ0uh^Gn5m8FcutY#EnuTrF}rlt0k`lrOhtoG?zU))*BCK>jsf5q#GKK2GZf$T#h zV|`+ZXuYsH1i~|;`-z_9k65j!JGPKIl{m*^!Y5&*{0&dntu{ZgSzM%NO3IqFr|HIw z@#(8mqu!0qJoA0sXr&)67DgsIk^b<)z&ihhK=t5~V8@_8_$hLPT*Nfvw(xmLLG;00 z*ce4=<$C2hTH$QnrP)q$` zUHpp@*L2XF)a*nx*Ik;Cn$nu}>ZI}vJ`emO*@O*z^~75G&)C%9>w*b+O$%p+=Mg2S z$y90T5kErP&SCbx@*PTXrI##zxb(`>Gm5v!Xq-~oUENmFxJ|Q0u>mZU&T}W|SH#fR z@|c$>6TKLGQ%Dwc@plRpk60s9g5lubv7d=mQUSQBP-{9EEVg}aMM}N2?DUf9SJL04 
z&P^HRo#Oh_E|@a46Y;r{FS&so9-m6A4)yZ)E{YUg3Q)nEAQ7w|z903)7qi8ZvJjDu zfn|t2wAaqjEz|EX>@i%^H`lM%b<)mMe^KlOWhG1UEbEOEv9e)r;mn_bpOcIFMRJH0 zLOKbQN1+KGl6qhH<)Wg?o%oN*SD1H9eBnD6MqL zL-)TI>HZ0P`HE-3pdaYsyI4IuZDka9zXJh9=YYH=e7XMznU`eDo`3bSi zZ*c=WW?Jg`C*^!e}iRhR^yT`b9ckQ%b#B`5vzdUP|MHeXN3!2_al2FeIP(KJUA$U}JGv*{l zSR1AsO8=3uA@g|F^Af+7&M7e@Yf|RUl!lIPCQ8>6RmT5Nv0ykKrE3y5BVQxEBI|>b z3miYMe}DAz-@;_*Y2DOk%@uph3^W!7rqWG4cXyZi}L!;d!ygz6F=Bwbn<=Nn?pEfFed&(lG$x_=0 z_1kn;)gQoVUZi!zl1RNsgU}-XtHKtA$|BS@39X48pdYapl9$EHu(6_ox`duHj5j3p zFZI{;oeY@KV_>w))Ke5QzyT>+7{INGmm@9)ALm#6ar5&(U(Ob_h+*-Q@f5F z|I0X-smkIrk7hL}xw3>UYhXsxbm*OFo2nnC_9?F_Ppbnkg`G`WBin;hgEfMWi`wUJ z`1$LP_>bm=_ab&`HZz4!k#iJ(>1^it_J*F0DHT%d_!hgDc<%TbrBw1hb1X8i(3jIr z*JzcLd^tHdemIsAp@M0F{(iA2yC}ooA(#l82osZ`1 zrKffCbZ@mgHF>Hniiu!_)R5oB?4wFWZu+a|VPE%sYW1D>8!2zXkmwp$h<8neDZ$Ly zS=TcE$YP5pOO(kPnd(Xf=`+2DtrxTh@Buhf#uc|DUE)eq@E7-I`*#+7Ey&5O^rO^| z8F@_tC1bzG=khAJMR`fPQ9sz6;~4L~ojN16oX77}x!=2ccptbej$DKh3hfqU0AXo) z{yuFXR)y{t?J2xoaHH^pe`By+cuBMwd5yW6d;;wXTq!7Ls|Rbl=>IgN7;5TQ>UL;< z(YDY`P`6TD!H+|`IE$;ugovu4X9abBH26IB%g}=Tv0cmvc3a{NXH&XdYqJu?6vgtg z))sqP;#4t3`q`9^sV`F3xT+aciluS{`=)eYm$?3fI`ql^16BDa6#mFJd zWyZ!-+`BpD*VHAhI;Qi=USK;mMcG;LLpVj(icSfV{@q1b(cgvU!Z8K+@}mVXScyE5 z_(LkfI;t1zV)}OG1CE>CifK<$wcf)n46(o7?uCvMwjO3yw?@4cUn`E{7-k;bmyCor z_)8b`%hwk1!3NQ7L^-lMrDJCDB7Ca+OVdUh(sb3l(+t-Q*T2?J&^OoquCAxNjPB93 zp;Ept1ld2xX2D2-BJcW-+@IwF?}*xoYg{&Wj!#!YXZ_6U#mKBrX`|EgGDl~&P5b8i z)5m!Z+gBJE#a?&=E2Hp%Udd7PjmYHScK_(26NSH{=X}1e;CHmYyNL>HZE2>Witd`> zi}96tjfn>(%W zx6mMT>$pljqkd!dCD()fO1*ZT&Zw=gsjAJ?{-rIg+oVm=j8<*JQ=u&VAqS+tq=ZZucd zl)+{~H?|e)C}(rE=tJS&!GHa*utiaaqKbu?1$7JB7nwp7VccpwQEdm`gb(ae=4ZSTIVAkD$d!LSZ(d=WV7tiL z=sKb|^*pW;rh&)GLXBGYLi?}wopz5FYVT+_Ylmyws|#=d3gmtAY`K{mVAY~Y zdA`EW5lH{V4(2MbW4JbWV;i3GDYa*M<@8=@VtUE651uP-#(U8FkNv&QrdS}Gz#IHM zT*@1nD3Ki=9tfg1s$aCZXjGBCz+ccbFgr4l)(frhe9bP@!MS1T@3`l!nlUqDY-+%> z({<0;!dc1@w^gtlF}Sr2ab7HuJj~u@uF|bzD+74p(Y&|$ef>v5=OZhk)rcF^;6zl2 zV2xBwH1oBCG+Q-FZBp}E(_ZsZ{X%w1reeNkUhFTpi<-c3dsGxSyxM1I~6m~?}7)Pz)X37G-Ta~Tp 
zp`ENdY@rR^XQ=OXL5T znz=9eMx|a(yOO>)Bc8g&d)TwkS2ks>E7e3Q&7d(-38ujZe4M@>I~cYG3JbCd>J)S+ z@D{$!FQ0$0ut{hPaXIlp%2mwJT+w|q+O7TF_0!3WN$KBGe)nE-8{DN`zd62IyO?x( zURf5HrA5iT++PU?y)}9{(5LWBfn4|{U=L@9?}RHwo0EeRO~oiEul%UmtG=l$#>AMy*{s}nmtoL8UGVYR}^AS`8A&whtc8T9)T)F?+S^+Vucs-kK|V@TiGSfe@>kGE(N}dwJrH?Y8*OEssC}jOBd#zW zX`}*TPO=C4f)XN?L)uU+a$<6)oB@_fpT*<&D`P{~KyQ6-zBlBXnR+WF=rTFhyRLbr zxdiid^%igk9>s##ZgB*=nK~9p3le@3dC&Ruu^DP)&qGf=PsF{#wb7no`Q2Dt_or$b05_%|6v5g@jE9BVZM|j*!I_kJpQB zi8Lc2yI2B9AF7ic$mcYLwgUHS)U`_UO!Q=U54(Rj?2b`R%Hgwg(lK}sq;uR4%E0Er zf&@;*BEv(r(9htNz%0ME=zc-F!gl^I!8*~B^ZP zb43m4j8{;bjIZruy?s;e`TF|)@eTKF^yaxwISyNUnj0G`Xu=5h?@4pSu0n)!GY3gU zjEJPf%ES&w^CREGzR0sEARE$?nG>kT&{VKV1LR%Mip|0&DK;r?;sdbx;5kyJw~=Zm z)rlr_H2yEYP`;1tMY>26vZl~B!2Qvi=Bwz-N%@|tNy&2s?a+S4e$HCn&_}UDUV{|8 zN2KCnOTH%4gNhNk(f84jvAt0*^7+sHU;VTEJ%fLT7st|>$;sxTO{TERnj_}d&d;7v zo|f*No~zy=-UFTj=WE+a%V(ogr&O)SD#DO7OWcsG$ZF})M6cM2*Z|@zaX9uOQW+^^ zwvfGPCG(uwn3%=&OO_NS2vfz*@*AXV>b|>?J*qy=8sVd^=P6r~r7F@yp$oq|@gL=n{Skc@ zkt55Zy`p!+0d#-Z5j-1w5(-9evKv#CuO-Yv{nkR|XG4zdr_=26x!$^ucq{rkc;~x2 zI?LEETQFmFO}64aI3({Dy~*8)iu4;|du(K^89@;Bh|;mV=1TRv6O}Z&2lKOz@vTk6;e! 
z)cc}N;wjaOnad95{}T>_Z|YJ;-rU7{+qTNF-1QNiPfyQ$x65_f_QK@SKUWV@_QqGi zd%|V59#Z*KC*9;0@)TK>oPeqpqlsUsIY^B)JuxpaD=~w;#LeT4$wSGB!tdfvv9WOYa&)?@Z3wCfpp;OU{9r$|HQWN1=?ACcsyL)@?`97sKN_F@Kc{aM2x^(tD zV~*y4Vi%gb=qDS52CRfsCI_S6qX$v1>7VGp@PXjc;M&llaCRgmT03@*Y!u(Y2#FAX zRBoxPqAPCPW$JIKZdbdfdVleW-lv|6F2Vkv<)X2?{*q>((gt#cGu-L~6Q57FqOOo6 zsi9_)3KAzvQJL|9%sQqslg12Yh9nlSPHq)fhTo2KfY~iZ6L+Cr z=Ooo@MGZxoW`b#z{fo1K>!hoshw=4J>zI1VcihMOwt9bY&9DwLEYVP^O-e785EpQl z>88Z_=#FUdSlt*OIUf2ukRRw9E*C8u`#p9c)|4DT!+80Gf*US)!BVAHvrD^2Pnh1= z%&tQBpB~mNy2d#FwO_a1HUDY+tR16dVHS$U-s}sr6Iq@Ou?N`^>;g8)uHk;=pYR>{Roq54jaw^@#AYkssLHD6AzWJ%uQwREvdLMZ&dq%s;*}fUJXq}pSDhA&z2lxeyKz1T-#VW+kN9#qN z267jYX7Q4I8Xs#{`$(^~PP_my+g8ms54A z3sTcA?+JyR+#+NY2Jy;yKDO8n5A#d4cVneXwJpqlDwSy@I2WqoUn!d24u#I?_P# z7(}GC$)T)`nMsS}W5PuILWGHSWDBw(xr7`^Md+5yz(hB8J$skUV+D3S*Asc%A#NJW zFe%Jfc6)NDbOg;j9D(OCwR(o(l2vq^b6s_F-f-&JbW29XjJN4zT7^{Lo#jZFM;OcK z2WavY7|cixOnjhQB0Wql!lAX1aEK3DLo36-M2n&_^1qp}QbcdE2Hl-`z%ENZ5htN8 z(mwo*iqn=fzBM1P&bHmRb+_9bna&5!JjVcAy4hiPt7X-+(ty{HW6923sYK8C7wQgK zpR7YxA{P=4qCQcBd`kThAHwWMdeq_x9b3qTxN>|6E-z7&@x~v