From 9c9426fb1156ed0c7efeca229aadf49c315812ef Mon Sep 17 00:00:00 2001
From: "ci.datadog-api-spec" <packages@datadoghq.com>
Date: Mon, 16 Jan 2023 10:01:49 +0000
Subject: [PATCH] Regenerate client from commit b299f7d6 of spec repo

---
 .apigentools-info                             |   8 +-
 .generator/schemas/v1/openapi.yaml            |  77 +++++++++
 .../model_list_stream_compute_aggregation.go  | 135 ++++++++++++++++
 .../model_list_stream_compute_items.go        | 146 ++++++++++++++++++
 .../model_list_stream_group_by_items.go       | 103 ++++++++++++
 api/datadogV1/model_list_stream_query.go      |  78 +++++++++-
 api/datadogV1/model_list_stream_source.go     |  12 +-
 .../dashboards/CreateDashboard_1039800684.go  |  61 ++++++++
 .../dashboards/CreateDashboard_2843286292.go  |  67 ++++++++
 ...s_pattern_stream_list_stream_widget.freeze |   1 +
 ...ogs_pattern_stream_list_stream_widget.yaml |  42 +++++
 ...ansaction_stream_list_stream_widget.freeze |   1 +
 ...transaction_stream_list_stream_widget.yaml |  42 +++++
 .../scenarios/features/v1/dashboards.feature  |  20 +++
 14 files changed, 780 insertions(+), 13 deletions(-)
 create mode 100644 api/datadogV1/model_list_stream_compute_aggregation.go
 create mode 100644 api/datadogV1/model_list_stream_compute_items.go
 create mode 100644 api/datadogV1/model_list_stream_group_by_items.go
 create mode 100644 examples/v1/dashboards/CreateDashboard_1039800684.go
 create mode 100644 examples/v1/dashboards/CreateDashboard_2843286292.go
 create mode 100644 tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze
 create mode 100644 tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml
 create mode 100644 tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.freeze
 create mode 100644 tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.yaml

diff --git a/.apigentools-info b/.apigentools-info
index e79497afae3..bd45adf8cf4 100644
--- a/.apigentools-info
+++ b/.apigentools-info
@@ -4,13 +4,13 @@
     "spec_versions": {
         "v1": {
             "apigentools_version": "1.6.4",
-            "regenerated": "2023-01-16 09:24:31.725966",
-            "spec_repo_commit": "7366dfa9"
+            "regenerated": "2023-01-16 10:00:42.109665",
+            "spec_repo_commit": "b299f7d6"
         },
         "v2": {
             "apigentools_version": "1.6.4",
-            "regenerated": "2023-01-16 09:24:31.738986",
-            "spec_repo_commit": "7366dfa9"
+            "regenerated": "2023-01-16 10:00:42.121633",
+            "spec_repo_commit": "b299f7d6"
         }
     }
 }
\ No newline at end of file
diff --git a/.generator/schemas/v1/openapi.yaml b/.generator/schemas/v1/openapi.yaml
index e746bbf9a35..c9ba7b9d83f 100644
--- a/.generator/schemas/v1/openapi.yaml
+++ b/.generator/schemas/v1/openapi.yaml
@@ -4002,11 +4002,86 @@ components:
       - AUTO
       - COMPACT
       - FULL
+    ListStreamComputeAggregation:
+      description: Aggregation value.
+      enum:
+      - count
+      - cardinality
+      - median
+      - pc75
+      - pc90
+      - pc95
+      - pc98
+      - pc99
+      - sum
+      - min
+      - max
+      - avg
+      - earliest
+      - latest
+      - most_frequent
+      example: count
+      type: string
+      x-enum-varnames:
+      - COUNT
+      - CARDINALITY
+      - MEDIAN
+      - PC75
+      - PC90
+      - PC95
+      - PC98
+      - PC99
+      - SUM
+      - MIN
+      - MAX
+      - AVG
+      - EARLIEST
+      - LATEST
+      - MOST_FREQUENT
+    ListStreamComputeItems:
+      description: List of facets and aggregations to compute.
+      properties:
+        aggregation:
+          $ref: '#/components/schemas/ListStreamComputeAggregation'
+        facet:
+          description: Facet name.
+          example: resource_name
+          type: string
+      required:
+      - aggregation
+      type: object
+    ListStreamGroupByItems:
+      description: List of facets on which to group.
+      properties:
+        facet:
+          description: Facet name.
+          example: resource_name
+          type: string
+      required:
+      - facet
+      type: object
     ListStreamQuery:
       description: Updated list stream widget.
       properties:
+        compute:
+          description: Compute configuration for the List Stream Widget. Compute can
+            be used only with the logs_transaction_stream (from 1 to 5 items) list
+            stream source.
+          items:
+            $ref: '#/components/schemas/ListStreamComputeItems'
+          maxItems: 5
+          minItems: 1
+          type: array
         data_source:
           $ref: '#/components/schemas/ListStreamSource'
+        group_by:
+          description: Group by configuration for the List Stream Widget. Group by
+            can be used only with logs_pattern_stream (up to 3 items) or logs_transaction_stream
+            (one group by item is required) list stream source.
+          items:
+            $ref: '#/components/schemas/ListStreamGroupByItems'
+          maxItems: 3
+          type: array
         indexes:
           description: List of indexes.
           items:
@@ -4042,6 +4117,7 @@ components:
       - rum_issue_stream
       - apm_issue_stream
       - logs_pattern_stream
+      - logs_transaction_stream
       example: apm_issue_stream
       type: string
       x-enum-varnames:
@@ -4050,6 +4126,7 @@ components:
       - RUM_ISSUE_STREAM
       - APM_ISSUE_STREAM
       - LOGS_PATTERN_STREAM
+      - LOGS_TRANSACTION_STREAM
     ListStreamWidgetDefinition:
       description: 'The list stream visualization displays a table of recent events
         in your application that
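
The schema hunk above constrains compute to the logs_transaction_stream source (1 to 5 items) and group_by to logs_pattern_stream (up to 3 items) or logs_transaction_stream (one item required). A minimal sketch of a query that satisfies the logs_transaction_stream constraints, built with the generated Go models added later in this patch (the spec declares the item-count limits; the generated client does not enforce them):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	// logs_transaction_stream expects at least one group_by item and 1 to 5 compute items.
	query := datadogV1.ListStreamQuery{
		DataSource:  datadogV1.LISTSTREAMSOURCE_LOGS_TRANSACTION_STREAM,
		QueryString: "",
		GroupBy: []datadogV1.ListStreamGroupByItems{
			{Facet: "service"},
		},
		Compute: []datadogV1.ListStreamComputeItems{
			{Aggregation: datadogV1.LISTSTREAMCOMPUTEAGGREGATION_COUNT, Facet: datadog.PtrString("service")},
		},
	}
	payload, _ := json.Marshal(query)
	fmt.Println(string(payload))
}
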
diff --git a/api/datadogV1/model_list_stream_compute_aggregation.go b/api/datadogV1/model_list_stream_compute_aggregation.go
new file mode 100644
index 00000000000..2a23e2c3b48
--- /dev/null
+++ b/api/datadogV1/model_list_stream_compute_aggregation.go
@@ -0,0 +1,135 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+
+package datadogV1
+
+import (
+	"encoding/json"
+	"fmt"
+)
+
+// ListStreamComputeAggregation Aggregation value.
+type ListStreamComputeAggregation string
+
+// List of ListStreamComputeAggregation.
+const (
+	LISTSTREAMCOMPUTEAGGREGATION_COUNT         ListStreamComputeAggregation = "count"
+	LISTSTREAMCOMPUTEAGGREGATION_CARDINALITY   ListStreamComputeAggregation = "cardinality"
+	LISTSTREAMCOMPUTEAGGREGATION_MEDIAN        ListStreamComputeAggregation = "median"
+	LISTSTREAMCOMPUTEAGGREGATION_PC75          ListStreamComputeAggregation = "pc75"
+	LISTSTREAMCOMPUTEAGGREGATION_PC90          ListStreamComputeAggregation = "pc90"
+	LISTSTREAMCOMPUTEAGGREGATION_PC95          ListStreamComputeAggregation = "pc95"
+	LISTSTREAMCOMPUTEAGGREGATION_PC98          ListStreamComputeAggregation = "pc98"
+	LISTSTREAMCOMPUTEAGGREGATION_PC99          ListStreamComputeAggregation = "pc99"
+	LISTSTREAMCOMPUTEAGGREGATION_SUM           ListStreamComputeAggregation = "sum"
+	LISTSTREAMCOMPUTEAGGREGATION_MIN           ListStreamComputeAggregation = "min"
+	LISTSTREAMCOMPUTEAGGREGATION_MAX           ListStreamComputeAggregation = "max"
+	LISTSTREAMCOMPUTEAGGREGATION_AVG           ListStreamComputeAggregation = "avg"
+	LISTSTREAMCOMPUTEAGGREGATION_EARLIEST      ListStreamComputeAggregation = "earliest"
+	LISTSTREAMCOMPUTEAGGREGATION_LATEST        ListStreamComputeAggregation = "latest"
+	LISTSTREAMCOMPUTEAGGREGATION_MOST_FREQUENT ListStreamComputeAggregation = "most_frequent"
+)
+
+var allowedListStreamComputeAggregationEnumValues = []ListStreamComputeAggregation{
+	LISTSTREAMCOMPUTEAGGREGATION_COUNT,
+	LISTSTREAMCOMPUTEAGGREGATION_CARDINALITY,
+	LISTSTREAMCOMPUTEAGGREGATION_MEDIAN,
+	LISTSTREAMCOMPUTEAGGREGATION_PC75,
+	LISTSTREAMCOMPUTEAGGREGATION_PC90,
+	LISTSTREAMCOMPUTEAGGREGATION_PC95,
+	LISTSTREAMCOMPUTEAGGREGATION_PC98,
+	LISTSTREAMCOMPUTEAGGREGATION_PC99,
+	LISTSTREAMCOMPUTEAGGREGATION_SUM,
+	LISTSTREAMCOMPUTEAGGREGATION_MIN,
+	LISTSTREAMCOMPUTEAGGREGATION_MAX,
+	LISTSTREAMCOMPUTEAGGREGATION_AVG,
+	LISTSTREAMCOMPUTEAGGREGATION_EARLIEST,
+	LISTSTREAMCOMPUTEAGGREGATION_LATEST,
+	LISTSTREAMCOMPUTEAGGREGATION_MOST_FREQUENT,
+}
+
+// GetAllowedValues returns the list of possible values.
+func (v *ListStreamComputeAggregation) GetAllowedValues() []ListStreamComputeAggregation {
+	return allowedListStreamComputeAggregationEnumValues
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (v *ListStreamComputeAggregation) UnmarshalJSON(src []byte) error {
+	var value string
+	err := json.Unmarshal(src, &value)
+	if err != nil {
+		return err
+	}
+	*v = ListStreamComputeAggregation(value)
+	return nil
+}
+
+// NewListStreamComputeAggregationFromValue returns a pointer to a valid ListStreamComputeAggregation
+// for the value passed as argument, or an error if the value passed is not allowed by the enum.
+func NewListStreamComputeAggregationFromValue(v string) (*ListStreamComputeAggregation, error) {
+	ev := ListStreamComputeAggregation(v)
+	if ev.IsValid() {
+		return &ev, nil
+	}
+	return nil, fmt.Errorf("invalid value '%v' for ListStreamComputeAggregation: valid values are %v", v, allowedListStreamComputeAggregationEnumValues)
+}
+
+// IsValid returns true if the value is valid for the enum, false otherwise.
+func (v ListStreamComputeAggregation) IsValid() bool {
+	for _, existing := range allowedListStreamComputeAggregationEnumValues {
+		if existing == v {
+			return true
+		}
+	}
+	return false
+}
+
+// Ptr returns reference to ListStreamComputeAggregation value.
+func (v ListStreamComputeAggregation) Ptr() *ListStreamComputeAggregation {
+	return &v
+}
+
+// NullableListStreamComputeAggregation handles when a null is used for ListStreamComputeAggregation.
+type NullableListStreamComputeAggregation struct {
+	value *ListStreamComputeAggregation
+	isSet bool
+}
+
+// Get returns the associated value.
+func (v NullableListStreamComputeAggregation) Get() *ListStreamComputeAggregation {
+	return v.value
+}
+
+// Set changes the value and indicates it's been called.
+func (v *NullableListStreamComputeAggregation) Set(val *ListStreamComputeAggregation) {
+	v.value = val
+	v.isSet = true
+}
+
+// IsSet returns whether Set has been called.
+func (v NullableListStreamComputeAggregation) IsSet() bool {
+	return v.isSet
+}
+
+// Unset sets the value to nil and resets the set flag.
+func (v *NullableListStreamComputeAggregation) Unset() {
+	v.value = nil
+	v.isSet = false
+}
+
+// NewNullableListStreamComputeAggregation initializes the struct as if Set has been called.
+func NewNullableListStreamComputeAggregation(val *ListStreamComputeAggregation) *NullableListStreamComputeAggregation {
+	return &NullableListStreamComputeAggregation{value: val, isSet: true}
+}
+
+// MarshalJSON serializes the associated value.
+func (v NullableListStreamComputeAggregation) MarshalJSON() ([]byte, error) {
+	return json.Marshal(v.value)
+}
+
+// UnmarshalJSON deserializes the payload and sets the flag as if Set has been called.
+func (v *NullableListStreamComputeAggregation) UnmarshalJSON(src []byte) error {
+	v.isSet = true
+	return json.Unmarshal(src, &v.value)
+}
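
A short usage sketch for the enum helpers defined above (NewListStreamComputeAggregationFromValue and IsValid), for example when accepting an aggregation name from configuration; the output comments reflect the behavior implemented in this file:

package main

import (
	"fmt"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	// A known value resolves to a pointer to the enum constant.
	agg, err := datadogV1.NewListStreamComputeAggregationFromValue("pc95")
	fmt.Println(*agg, err) // pc95 <nil>

	// An unknown value is rejected with an error listing the allowed values.
	if _, err := datadogV1.NewListStreamComputeAggregationFromValue("p95"); err != nil {
		fmt.Println(err)
	}

	// IsValid can be called on any converted string.
	fmt.Println(datadogV1.ListStreamComputeAggregation("most_frequent").IsValid()) // true
}
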
diff --git a/api/datadogV1/model_list_stream_compute_items.go b/api/datadogV1/model_list_stream_compute_items.go
new file mode 100644
index 00000000000..8c8fb07e167
--- /dev/null
+++ b/api/datadogV1/model_list_stream_compute_items.go
@@ -0,0 +1,146 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+
+package datadogV1
+
+import (
+	"encoding/json"
+	"fmt"
+)
+
+// ListStreamComputeItems List of facets and aggregations to compute.
+type ListStreamComputeItems struct {
+	// Aggregation value.
+	Aggregation ListStreamComputeAggregation `json:"aggregation"`
+	// Facet name.
+	Facet *string `json:"facet,omitempty"`
+	// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
+	UnparsedObject       map[string]interface{} `json:"-"`
+	AdditionalProperties map[string]interface{}
+}
+
+// NewListStreamComputeItems instantiates a new ListStreamComputeItems object.
+// This constructor will assign default values to properties that have it defined,
+// and makes sure properties required by API are set, but the set of arguments
+// will change when the set of required properties is changed.
+func NewListStreamComputeItems(aggregation ListStreamComputeAggregation) *ListStreamComputeItems {
+	this := ListStreamComputeItems{}
+	this.Aggregation = aggregation
+	return &this
+}
+
+// NewListStreamComputeItemsWithDefaults instantiates a new ListStreamComputeItems object.
+// This constructor will only assign default values to properties that have it defined,
+// but it doesn't guarantee that properties required by API are set.
+func NewListStreamComputeItemsWithDefaults() *ListStreamComputeItems {
+	this := ListStreamComputeItems{}
+	return &this
+}
+
+// GetAggregation returns the Aggregation field value.
+func (o *ListStreamComputeItems) GetAggregation() ListStreamComputeAggregation {
+	if o == nil {
+		var ret ListStreamComputeAggregation
+		return ret
+	}
+	return o.Aggregation
+}
+
+// GetAggregationOk returns a tuple with the Aggregation field value
+// and a boolean to check if the value has been set.
+func (o *ListStreamComputeItems) GetAggregationOk() (*ListStreamComputeAggregation, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Aggregation, true
+}
+
+// SetAggregation sets field value.
+func (o *ListStreamComputeItems) SetAggregation(v ListStreamComputeAggregation) {
+	o.Aggregation = v
+}
+
+// GetFacet returns the Facet field value if set, zero value otherwise.
+func (o *ListStreamComputeItems) GetFacet() string {
+	if o == nil || o.Facet == nil {
+		var ret string
+		return ret
+	}
+	return *o.Facet
+}
+
+// GetFacetOk returns a tuple with the Facet field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ListStreamComputeItems) GetFacetOk() (*string, bool) {
+	if o == nil || o.Facet == nil {
+		return nil, false
+	}
+	return o.Facet, true
+}
+
+// HasFacet returns a boolean if a field has been set.
+func (o *ListStreamComputeItems) HasFacet() bool {
+	return o != nil && o.Facet != nil
+}
+
+// SetFacet gets a reference to the given string and assigns it to the Facet field.
+func (o *ListStreamComputeItems) SetFacet(v string) {
+	o.Facet = &v
+}
+
+// MarshalJSON serializes the struct using spec logic.
+func (o ListStreamComputeItems) MarshalJSON() ([]byte, error) {
+	toSerialize := map[string]interface{}{}
+	if o.UnparsedObject != nil {
+		return json.Marshal(o.UnparsedObject)
+	}
+	toSerialize["aggregation"] = o.Aggregation
+	if o.Facet != nil {
+		toSerialize["facet"] = o.Facet
+	}
+
+	for key, value := range o.AdditionalProperties {
+		toSerialize[key] = value
+	}
+	return json.Marshal(toSerialize)
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (o *ListStreamComputeItems) UnmarshalJSON(bytes []byte) (err error) {
+	raw := map[string]interface{}{}
+	required := struct {
+		Aggregation *ListStreamComputeAggregation `json:"aggregation"`
+	}{}
+	all := struct {
+		Aggregation ListStreamComputeAggregation `json:"aggregation"`
+		Facet       *string                      `json:"facet,omitempty"`
+	}{}
+	err = json.Unmarshal(bytes, &required)
+	if err != nil {
+		return err
+	}
+	if required.Aggregation == nil {
+		return fmt.Errorf("required field aggregation missing")
+	}
+	err = json.Unmarshal(bytes, &all)
+	if err != nil {
+		err = json.Unmarshal(bytes, &raw)
+		if err != nil {
+			return err
+		}
+		o.UnparsedObject = raw
+		return nil
+	}
+	if v := all.Aggregation; !v.IsValid() {
+		err = json.Unmarshal(bytes, &raw)
+		if err != nil {
+			return err
+		}
+		o.UnparsedObject = raw
+		return nil
+	}
+	o.Aggregation = all.Aggregation
+	o.Facet = all.Facet
+	return nil
+}
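
The UnmarshalJSON logic above distinguishes a missing required field from an unrecognized aggregation value; a small sketch of both paths as implemented in this file:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	var item datadogV1.ListStreamComputeItems

	// A payload without the required "aggregation" field fails to unmarshal.
	err := json.Unmarshal([]byte(`{"facet":"resource_name"}`), &item)
	fmt.Println(err) // required field aggregation missing

	// An unknown aggregation does not fail; the raw payload is kept in UnparsedObject.
	err = json.Unmarshal([]byte(`{"aggregation":"p95","facet":"resource_name"}`), &item)
	fmt.Println(err, item.UnparsedObject != nil) // <nil> true
}
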
diff --git a/api/datadogV1/model_list_stream_group_by_items.go b/api/datadogV1/model_list_stream_group_by_items.go
new file mode 100644
index 00000000000..eecf71f8e8d
--- /dev/null
+++ b/api/datadogV1/model_list_stream_group_by_items.go
@@ -0,0 +1,103 @@
+// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+// This product includes software developed at Datadog (https://www.datadoghq.com/).
+// Copyright 2019-Present Datadog, Inc.
+
+package datadogV1
+
+import (
+	"encoding/json"
+	"fmt"
+)
+
+// ListStreamGroupByItems List of facets on which to group.
+type ListStreamGroupByItems struct {
+	// Facet name.
+	Facet string `json:"facet"`
+	// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
+	UnparsedObject       map[string]interface{} `json:"-"`
+	AdditionalProperties map[string]interface{}
+}
+
+// NewListStreamGroupByItems instantiates a new ListStreamGroupByItems object.
+// This constructor will assign default values to properties that have it defined,
+// and makes sure properties required by API are set, but the set of arguments
+// will change when the set of required properties is changed.
+func NewListStreamGroupByItems(facet string) *ListStreamGroupByItems {
+	this := ListStreamGroupByItems{}
+	this.Facet = facet
+	return &this
+}
+
+// NewListStreamGroupByItemsWithDefaults instantiates a new ListStreamGroupByItems object.
+// This constructor will only assign default values to properties that have it defined,
+// but it doesn't guarantee that properties required by API are set.
+func NewListStreamGroupByItemsWithDefaults() *ListStreamGroupByItems {
+	this := ListStreamGroupByItems{}
+	return &this
+}
+
+// GetFacet returns the Facet field value.
+func (o *ListStreamGroupByItems) GetFacet() string {
+	if o == nil {
+		var ret string
+		return ret
+	}
+	return o.Facet
+}
+
+// GetFacetOk returns a tuple with the Facet field value
+// and a boolean to check if the value has been set.
+func (o *ListStreamGroupByItems) GetFacetOk() (*string, bool) {
+	if o == nil {
+		return nil, false
+	}
+	return &o.Facet, true
+}
+
+// SetFacet sets field value.
+func (o *ListStreamGroupByItems) SetFacet(v string) {
+	o.Facet = v
+}
+
+// MarshalJSON serializes the struct using spec logic.
+func (o ListStreamGroupByItems) MarshalJSON() ([]byte, error) {
+	toSerialize := map[string]interface{}{}
+	if o.UnparsedObject != nil {
+		return json.Marshal(o.UnparsedObject)
+	}
+	toSerialize["facet"] = o.Facet
+
+	for key, value := range o.AdditionalProperties {
+		toSerialize[key] = value
+	}
+	return json.Marshal(toSerialize)
+}
+
+// UnmarshalJSON deserializes the given payload.
+func (o *ListStreamGroupByItems) UnmarshalJSON(bytes []byte) (err error) {
+	raw := map[string]interface{}{}
+	required := struct {
+		Facet *string `json:"facet"`
+	}{}
+	all := struct {
+		Facet string `json:"facet"`
+	}{}
+	err = json.Unmarshal(bytes, &required)
+	if err != nil {
+		return err
+	}
+	if required.Facet == nil {
+		return fmt.Errorf("required field facet missing")
+	}
+	err = json.Unmarshal(bytes, &all)
+	if err != nil {
+		err = json.Unmarshal(bytes, &raw)
+		if err != nil {
+			return err
+		}
+		o.UnparsedObject = raw
+		return nil
+	}
+	o.Facet = all.Facet
+	return nil
+}
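
ListStreamGroupByItems follows the same pattern with facet as its only required field; a brief sketch using the generated constructor, which takes that field directly:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	// NewListStreamGroupByItems requires the facet up front.
	groupBy := datadogV1.NewListStreamGroupByItems("service")
	out, _ := json.Marshal(groupBy)
	fmt.Println(string(out)) // {"facet":"service"}
}
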
diff --git a/api/datadogV1/model_list_stream_query.go b/api/datadogV1/model_list_stream_query.go
index 192c73b61fc..483eaaf1def 100644
--- a/api/datadogV1/model_list_stream_query.go
+++ b/api/datadogV1/model_list_stream_query.go
@@ -11,8 +11,12 @@ import (
 
 // ListStreamQuery Updated list stream widget.
 type ListStreamQuery struct {
+	// Compute configuration for the List Stream Widget. Compute can be used only with the logs_transaction_stream (from 1 to 5 items) list stream source.
+	Compute []ListStreamComputeItems `json:"compute,omitempty"`
 	// Source from which to query items to display in the stream.
 	DataSource ListStreamSource `json:"data_source"`
+	// Group by configuration for the List Stream Widget. Group by can be used only with logs_pattern_stream (up to 3 items) or logs_transaction_stream (one group by item is required) list stream source.
+	GroupBy []ListStreamGroupByItems `json:"group_by,omitempty"`
 	// List of indexes.
 	Indexes []string `json:"indexes,omitempty"`
 	// Widget query.
@@ -45,6 +49,34 @@ func NewListStreamQueryWithDefaults() *ListStreamQuery {
 	return &this
 }
 
+// GetCompute returns the Compute field value if set, zero value otherwise.
+func (o *ListStreamQuery) GetCompute() []ListStreamComputeItems {
+	if o == nil || o.Compute == nil {
+		var ret []ListStreamComputeItems
+		return ret
+	}
+	return o.Compute
+}
+
+// GetComputeOk returns a tuple with the Compute field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ListStreamQuery) GetComputeOk() (*[]ListStreamComputeItems, bool) {
+	if o == nil || o.Compute == nil {
+		return nil, false
+	}
+	return &o.Compute, true
+}
+
+// HasCompute returns a boolean if a field has been set.
+func (o *ListStreamQuery) HasCompute() bool {
+	return o != nil && o.Compute != nil
+}
+
+// SetCompute gets a reference to the given []ListStreamComputeItems and assigns it to the Compute field.
+func (o *ListStreamQuery) SetCompute(v []ListStreamComputeItems) {
+	o.Compute = v
+}
+
 // GetDataSource returns the DataSource field value.
 func (o *ListStreamQuery) GetDataSource() ListStreamSource {
 	if o == nil {
@@ -68,6 +100,34 @@ func (o *ListStreamQuery) SetDataSource(v ListStreamSource) {
 	o.DataSource = v
 }
 
+// GetGroupBy returns the GroupBy field value if set, zero value otherwise.
+func (o *ListStreamQuery) GetGroupBy() []ListStreamGroupByItems {
+	if o == nil || o.GroupBy == nil {
+		var ret []ListStreamGroupByItems
+		return ret
+	}
+	return o.GroupBy
+}
+
+// GetGroupByOk returns a tuple with the GroupBy field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ListStreamQuery) GetGroupByOk() (*[]ListStreamGroupByItems, bool) {
+	if o == nil || o.GroupBy == nil {
+		return nil, false
+	}
+	return &o.GroupBy, true
+}
+
+// HasGroupBy returns a boolean if a field has been set.
+func (o *ListStreamQuery) HasGroupBy() bool {
+	return o != nil && o.GroupBy != nil
+}
+
+// SetGroupBy gets a reference to the given []ListStreamGroupByItems and assigns it to the GroupBy field.
+func (o *ListStreamQuery) SetGroupBy(v []ListStreamGroupByItems) {
+	o.GroupBy = v
+}
+
 // GetIndexes returns the Indexes field value if set, zero value otherwise.
 func (o *ListStreamQuery) GetIndexes() []string {
 	if o == nil || o.Indexes == nil {
@@ -153,7 +213,13 @@ func (o ListStreamQuery) MarshalJSON() ([]byte, error) {
 	if o.UnparsedObject != nil {
 		return json.Marshal(o.UnparsedObject)
 	}
+	if o.Compute != nil {
+		toSerialize["compute"] = o.Compute
+	}
 	toSerialize["data_source"] = o.DataSource
+	if o.GroupBy != nil {
+		toSerialize["group_by"] = o.GroupBy
+	}
 	if o.Indexes != nil {
 		toSerialize["indexes"] = o.Indexes
 	}
@@ -176,10 +242,12 @@ func (o *ListStreamQuery) UnmarshalJSON(bytes []byte) (err error) {
 		QueryString *string           `json:"query_string"`
 	}{}
 	all := struct {
-		DataSource  ListStreamSource `json:"data_source"`
-		Indexes     []string         `json:"indexes,omitempty"`
-		QueryString string           `json:"query_string"`
-		Storage     *string          `json:"storage,omitempty"`
+		Compute     []ListStreamComputeItems `json:"compute,omitempty"`
+		DataSource  ListStreamSource         `json:"data_source"`
+		GroupBy     []ListStreamGroupByItems `json:"group_by,omitempty"`
+		Indexes     []string                 `json:"indexes,omitempty"`
+		QueryString string                   `json:"query_string"`
+		Storage     *string                  `json:"storage,omitempty"`
 	}{}
 	err = json.Unmarshal(bytes, &required)
 	if err != nil {
@@ -208,7 +276,9 @@ func (o *ListStreamQuery) UnmarshalJSON(bytes []byte) (err error) {
 		o.UnparsedObject = raw
 		return nil
 	}
+	o.Compute = all.Compute
 	o.DataSource = all.DataSource
+	o.GroupBy = all.GroupBy
 	o.Indexes = all.Indexes
 	o.QueryString = all.QueryString
 	o.Storage = all.Storage
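
The new Compute and GroupBy accessors on ListStreamQuery follow the optional-field pattern used by the existing fields. A sketch using the setters; it assumes the pre-existing NewListStreamQuery(dataSource, queryString) constructor, which is not shown in this hunk:

package main

import (
	"fmt"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	// Assumed constructor taking the two required fields (data source, query string).
	query := datadogV1.NewListStreamQuery(datadogV1.LISTSTREAMSOURCE_LOGS_TRANSACTION_STREAM, "")
	fmt.Println(query.HasCompute(), query.HasGroupBy()) // false false

	query.SetGroupBy([]datadogV1.ListStreamGroupByItems{{Facet: "service"}})
	query.SetCompute([]datadogV1.ListStreamComputeItems{{
		Aggregation: datadogV1.LISTSTREAMCOMPUTEAGGREGATION_COUNT,
		Facet:       datadog.PtrString("service"),
	}})
	fmt.Println(query.HasCompute(), query.HasGroupBy()) // true true
}
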
diff --git a/api/datadogV1/model_list_stream_source.go b/api/datadogV1/model_list_stream_source.go
index a591fb88f64..375c19f7bb0 100644
--- a/api/datadogV1/model_list_stream_source.go
+++ b/api/datadogV1/model_list_stream_source.go
@@ -14,11 +14,12 @@ type ListStreamSource string
 
 // List of ListStreamSource.
 const (
-	LISTSTREAMSOURCE_LOGS_STREAM         ListStreamSource = "logs_stream"
-	LISTSTREAMSOURCE_AUDIT_STREAM        ListStreamSource = "audit_stream"
-	LISTSTREAMSOURCE_RUM_ISSUE_STREAM    ListStreamSource = "rum_issue_stream"
-	LISTSTREAMSOURCE_APM_ISSUE_STREAM    ListStreamSource = "apm_issue_stream"
-	LISTSTREAMSOURCE_LOGS_PATTERN_STREAM ListStreamSource = "logs_pattern_stream"
+	LISTSTREAMSOURCE_LOGS_STREAM             ListStreamSource = "logs_stream"
+	LISTSTREAMSOURCE_AUDIT_STREAM            ListStreamSource = "audit_stream"
+	LISTSTREAMSOURCE_RUM_ISSUE_STREAM        ListStreamSource = "rum_issue_stream"
+	LISTSTREAMSOURCE_APM_ISSUE_STREAM        ListStreamSource = "apm_issue_stream"
+	LISTSTREAMSOURCE_LOGS_PATTERN_STREAM     ListStreamSource = "logs_pattern_stream"
+	LISTSTREAMSOURCE_LOGS_TRANSACTION_STREAM ListStreamSource = "logs_transaction_stream"
 )
 
 var allowedListStreamSourceEnumValues = []ListStreamSource{
@@ -27,6 +28,7 @@ var allowedListStreamSourceEnumValues = []ListStreamSource{
 	LISTSTREAMSOURCE_RUM_ISSUE_STREAM,
 	LISTSTREAMSOURCE_APM_ISSUE_STREAM,
 	LISTSTREAMSOURCE_LOGS_PATTERN_STREAM,
+	LISTSTREAMSOURCE_LOGS_TRANSACTION_STREAM,
 }
 
 // GetAllowedValues returns the list of possible values.
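
With the enum extension above, the new source value passes the standard validation helpers; this sketch assumes the usual generated helpers in the same file (IsValid and NewListStreamSourceFromValue), which are not part of this hunk:

package main

import (
	"fmt"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	// The added constant is now among the allowed values.
	fmt.Println(datadogV1.LISTSTREAMSOURCE_LOGS_TRANSACTION_STREAM.IsValid()) // true

	// Parsing the raw string also succeeds after this change.
	src, err := datadogV1.NewListStreamSourceFromValue("logs_transaction_stream")
	fmt.Println(*src, err) // logs_transaction_stream <nil>
}
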
diff --git a/examples/v1/dashboards/CreateDashboard_1039800684.go b/examples/v1/dashboards/CreateDashboard_1039800684.go
new file mode 100644
index 00000000000..f322f8f31d4
--- /dev/null
+++ b/examples/v1/dashboards/CreateDashboard_1039800684.go
@@ -0,0 +1,61 @@
+// Create a new dashboard with logs_pattern_stream list_stream widget
+
+package main
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"os"
+
+	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
+	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
+)
+
+func main() {
+	body := datadogV1.Dashboard{
+		LayoutType: datadogV1.DASHBOARDLAYOUTTYPE_ORDERED,
+		Title:      "Example-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget with list_stream widget",
+		Widgets: []datadogV1.Widget{
+			{
+				Definition: datadogV1.WidgetDefinition{
+					ListStreamWidgetDefinition: &datadogV1.ListStreamWidgetDefinition{
+						Type: datadogV1.LISTSTREAMWIDGETDEFINITIONTYPE_LIST_STREAM,
+						Requests: []datadogV1.ListStreamWidgetRequest{
+							{
+								Columns: []datadogV1.ListStreamColumn{
+									{
+										Width: datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
+										Field: "timestamp",
+									},
+								},
+								Query: datadogV1.ListStreamQuery{
+									DataSource:  datadogV1.LISTSTREAMSOURCE_LOGS_PATTERN_STREAM,
+									QueryString: "",
+									GroupBy: []datadogV1.ListStreamGroupByItems{
+										{
+											Facet: "service",
+										},
+									},
+								},
+								ResponseFormat: datadogV1.LISTSTREAMRESPONSEFORMAT_EVENT_LIST,
+							},
+						},
+					}},
+			},
+		},
+	}
+	ctx := datadog.NewDefaultContext(context.Background())
+	configuration := datadog.NewConfiguration()
+	apiClient := datadog.NewAPIClient(configuration)
+	api := datadogV1.NewDashboardsApi(apiClient)
+	resp, r, err := api.CreateDashboard(ctx, body)
+
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error when calling `DashboardsApi.CreateDashboard`: %v\n", err)
+		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
+	}
+
+	responseContent, _ := json.MarshalIndent(resp, "", "  ")
+	fmt.Fprintf(os.Stdout, "Response from `DashboardsApi.CreateDashboard`:\n%s\n", responseContent)
+}
diff --git a/examples/v1/dashboards/CreateDashboard_2843286292.go b/examples/v1/dashboards/CreateDashboard_2843286292.go
new file mode 100644
index 00000000000..925359bdcfc
--- /dev/null
+++ b/examples/v1/dashboards/CreateDashboard_2843286292.go
@@ -0,0 +1,67 @@
+// Create a new dashboard with logs_transaction_stream list_stream widget
+
+package main
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"os"
+
+	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
+	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
+)
+
+func main() {
+	body := datadogV1.Dashboard{
+		LayoutType: datadogV1.DASHBOARDLAYOUTTYPE_ORDERED,
+		Title:      "Example-Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget with list_stream widget",
+		Widgets: []datadogV1.Widget{
+			{
+				Definition: datadogV1.WidgetDefinition{
+					ListStreamWidgetDefinition: &datadogV1.ListStreamWidgetDefinition{
+						Type: datadogV1.LISTSTREAMWIDGETDEFINITIONTYPE_LIST_STREAM,
+						Requests: []datadogV1.ListStreamWidgetRequest{
+							{
+								Columns: []datadogV1.ListStreamColumn{
+									{
+										Width: datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
+										Field: "timestamp",
+									},
+								},
+								Query: datadogV1.ListStreamQuery{
+									DataSource:  datadogV1.LISTSTREAMSOURCE_LOGS_TRANSACTION_STREAM,
+									QueryString: "",
+									GroupBy: []datadogV1.ListStreamGroupByItems{
+										{
+											Facet: "service",
+										},
+									},
+									Compute: []datadogV1.ListStreamComputeItems{
+										{
+											Facet:       datadog.PtrString("service"),
+											Aggregation: datadogV1.LISTSTREAMCOMPUTEAGGREGATION_COUNT,
+										},
+									},
+								},
+								ResponseFormat: datadogV1.LISTSTREAMRESPONSEFORMAT_EVENT_LIST,
+							},
+						},
+					}},
+			},
+		},
+	}
+	ctx := datadog.NewDefaultContext(context.Background())
+	configuration := datadog.NewConfiguration()
+	apiClient := datadog.NewAPIClient(configuration)
+	api := datadogV1.NewDashboardsApi(apiClient)
+	resp, r, err := api.CreateDashboard(ctx, body)
+
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "Error when calling `DashboardsApi.CreateDashboard`: %v\n", err)
+		fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r)
+	}
+
+	responseContent, _ := json.MarshalIndent(resp, "", "  ")
+	fmt.Fprintf(os.Stdout, "Response from `DashboardsApi.CreateDashboard`:\n%s\n", responseContent)
+}
diff --git a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze
new file mode 100644
index 00000000000..2c74d6243dd
--- /dev/null
+++ b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze
@@ -0,0 +1 @@
+2023-01-13T16:43:57.804Z
\ No newline at end of file
diff --git a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml
new file mode 100644
index 00000000000..4dbcdb9d8fe
--- /dev/null
+++ b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml
@@ -0,0 +1,42 @@
+interactions:
+- request:
+    body: |
+      {"layout_type":"ordered","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1673628237 with list_stream widget","widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"}],"query":{"data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"}}]}
+    form: {}
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+    method: POST
+    url: https://api.datadoghq.com/api/v1/dashboard
+  response:
+    body: '{"notify_list":null,"description":null,"restricted_roles":[],"author_name":null,"template_variables":null,"is_read_only":false,"id":"vqp-zrn-fki","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1673628237
+      with list_stream widget","url":"/dashboard/vqp-zrn-fki/test-createanewdashboardwithlogspatternstreamliststreamwidget-1673628237-with-li","created_at":"2023-01-13T16:43:57.974008+00:00","modified_at":"2023-01-13T16:43:57.974008+00:00","author_handle":"frog@datadoghq.com","widgets":[{"definition":{"requests":[{"query":{"query_string":"","group_by":[{"facet":"service"}],"data_source":"logs_pattern_stream"},"response_format":"event_list","columns":[{"field":"timestamp","width":"auto"}]}],"type":"list_stream"},"id":8006246428423069}],"layout_type":"ordered"}
+
+      '
+    code: 200
+    duration: ''
+    headers:
+      Content-Type:
+      - application/json
+    status: 200 OK
+- request:
+    body: ''
+    form: {}
+    headers:
+      Accept:
+      - application/json
+    method: DELETE
+    url: https://api.datadoghq.com/api/v1/dashboard/vqp-zrn-fki
+  response:
+    body: '{"deleted_dashboard_id":"vqp-zrn-fki"}
+
+      '
+    code: 200
+    duration: ''
+    headers:
+      Content-Type:
+      - application/json
+    status: 200 OK
+version: 1
diff --git a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.freeze b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.freeze
new file mode 100644
index 00000000000..58383d7a8be
--- /dev/null
+++ b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.freeze
@@ -0,0 +1 @@
+2023-01-13T16:43:58.213Z
\ No newline at end of file
diff --git a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.yaml b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.yaml
new file mode 100644
index 00000000000..71e4aa12bbc
--- /dev/null
+++ b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget.yaml
@@ -0,0 +1,42 @@
+interactions:
+- request:
+    body: |
+      {"layout_type":"ordered","title":"Test-Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget-1673628238 with list_stream widget","widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"}],"query":{"compute":[{"aggregation":"count","facet":"service"}],"data_source":"logs_transaction_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"}}]}
+    form: {}
+    headers:
+      Accept:
+      - application/json
+      Content-Type:
+      - application/json
+    method: POST
+    url: https://api.datadoghq.com/api/v1/dashboard
+  response:
+    body: '{"notify_list":null,"description":null,"restricted_roles":[],"author_name":null,"template_variables":null,"is_read_only":false,"id":"89c-p48-tve","title":"Test-Create_a_new_dashboard_with_logs_transaction_stream_list_stream_widget-1673628238
+      with list_stream widget","url":"/dashboard/89c-p48-tve/test-createanewdashboardwithlogstransactionstreamliststreamwidget-1673628238-wit","created_at":"2023-01-13T16:43:58.328143+00:00","modified_at":"2023-01-13T16:43:58.328143+00:00","author_handle":"frog@datadoghq.com","widgets":[{"definition":{"requests":[{"query":{"query_string":"","group_by":[{"facet":"service"}],"data_source":"logs_transaction_stream","compute":[{"facet":"service","aggregation":"count"}]},"response_format":"event_list","columns":[{"field":"timestamp","width":"auto"}]}],"type":"list_stream"},"id":4678220055282526}],"layout_type":"ordered"}
+
+      '
+    code: 200
+    duration: ''
+    headers:
+      Content-Type:
+      - application/json
+    status: 200 OK
+- request:
+    body: ''
+    form: {}
+    headers:
+      Accept:
+      - application/json
+    method: DELETE
+    url: https://api.datadoghq.com/api/v1/dashboard/89c-p48-tve
+  response:
+    body: '{"deleted_dashboard_id":"89c-p48-tve"}
+
+      '
+    code: 200
+    duration: ''
+    headers:
+      Content-Type:
+      - application/json
+    status: 200 OK
+version: 1
diff --git a/tests/scenarios/features/v1/dashboards.feature b/tests/scenarios/features/v1/dashboards.feature
index 769b60a2eaf..4e9488c10bd 100644
--- a/tests/scenarios/features/v1/dashboards.feature
+++ b/tests/scenarios/features/v1/dashboards.feature
@@ -332,6 +332,15 @@ Feature: Dashboards
     And the response "widgets[0].definition.requests[0].queries[0].data_source" is equal to "logs"
     And the response "widgets[0].definition.requests[0].queries[0].storage" is equal to "online_archives"
 
+  @team:DataDog/dashboards
+  Scenario: Create a new dashboard with logs_pattern_stream list_stream widget
+    Given new "CreateDashboard" request
+    And body with value {"layout_type": "ordered", "title": "{{ unique }} with list_stream widget","widgets": [{"definition": {"type": "list_stream","requests": [{"columns":[{"width":"auto","field":"timestamp"}],"query":{"data_source":"logs_pattern_stream","query_string":"","group_by":[{"facet":"service"}]},"response_format":"event_list"}]}}]}
+    When the request is sent
+    Then the response status is 200 OK
+    And the response "widgets[0].definition.requests[0].query.data_source" is equal to "logs_pattern_stream"
+    And the response "widgets[0].definition.requests[0].query.group_by[0].facet" is equal to "service"
+
   @team:DataDog/dashboards
   Scenario: Create a new dashboard with logs_stream list_stream widget and storage parameter
     Given new "CreateDashboard" request
@@ -342,6 +351,17 @@ Feature: Dashboards
     And the response "widgets[0].definition.requests[0].query.data_source" is equal to "logs_stream"
     And the response "widgets[0].definition.requests[0].query.storage" is equal to "hot"
 
+  @team:DataDog/dashboards
+  Scenario: Create a new dashboard with logs_transaction_stream list_stream widget
+    Given new "CreateDashboard" request
+    And body with value {"layout_type": "ordered", "title": "{{ unique }} with list_stream widget","widgets": [{"definition": {"type": "list_stream","requests": [{"columns":[{"width":"auto","field":"timestamp"}],"query":{"data_source":"logs_transaction_stream","query_string":"","group_by":[{"facet":"service"}],"compute":[{"facet":"service","aggregation":"count"}]},"response_format":"event_list"}]}}]}
+    When the request is sent
+    Then the response status is 200 OK
+    And the response "widgets[0].definition.requests[0].query.data_source" is equal to "logs_transaction_stream"
+    And the response "widgets[0].definition.requests[0].query.group_by[0].facet" is equal to "service"
+    And the response "widgets[0].definition.requests[0].query.compute[0].facet" is equal to "service"
+    And the response "widgets[0].definition.requests[0].query.compute[0].aggregation" is equal to "count"
+
   @team:DataDog/dashboards
   Scenario: Create a new dashboard with manage_status widget
     Given new "CreateDashboard" request