swap to json schemas, add new vars
rlh1994 committed Mar 14, 2024
1 parent f3e8ac6 commit e742d7c
Showing 6 changed files with 1,075 additions and 948 deletions.
394 changes: 0 additions & 394 deletions src/components/JsonSchemaValidator/Schemas/dbtEcommerce_0.8.1.js

This file was deleted.

445 changes: 445 additions & 0 deletions src/components/JsonSchemaValidator/Schemas/dbtEcommerce_0.8.1.json

Large diffs are not rendered by default.

396 changes: 0 additions & 396 deletions src/components/JsonSchemaValidator/Schemas/dbtMediaPlayer_0.7.2.js

This file was deleted.

449 changes: 449 additions & 0 deletions src/components/JsonSchemaValidator/Schemas/dbtMediaPlayer_0.7.2.json

Large diffs are not rendered by default.

158 changes: 0 additions & 158 deletions src/components/JsonSchemaValidator/Schemas/dbtNormalize_0.3.5.js

This file was deleted.

181 changes: 181 additions & 0 deletions src/components/JsonSchemaValidator/Schemas/dbtNormalize_0.3.5.json
@@ -0,0 +1,181 @@
{
"definitions":{
"passthrough_vars":{
"type":"array",
"description":"> Click the plus sign to add a new entry",
"minItems":0,
"items":{
"title":"Type",
"oneOf":[
{
"type":"string",
"title":"Column Name"
},
{
"type":"object",
"title":"SQL & Alias",
"properties":{
"sql":{
"type":"string"
},
"alias":{
"type":"string"
}
},
"required":[
"sql",
"alias"
],
"additionalProperties":false
}
]
},
"uniqueItems":true
}
},
"type":"object",
"properties":{
"snowplow__atomic_schema":{
"type":"string",
"title":"Schema",
"description":"Schema (dataset) that contains your atomic events",
"longDescription":"The schema (dataset for BigQuery) that contains your atomic events table.",
"packageDefault":"atomic",
"group":"Warehouse and Tracker"
},
"snowplow__database":{
"type":"string",
"title":"Database",
"description":"Database that contains your atomic events",
"longDescription":"The database that contains your atomic events table.",
"packageDefault":"target.database",
"group":"Warehouse and Tracker"
},
"snowplow__dev_target_name":{
"type":"string",
"title":"Dev Target",
"description":"Target name of your development environment as defined in your `profiles.yml` file",
"longDescription":"The [target name](https://docs.getdbt.com/docs/core/connect-data-platform/profiles.yml) of your development environment as defined in your `profiles.yml` file. See the [Manifest Tables](/docs/modeling-your-data/modeling-your-data-with-dbt/package-mechanics/manifest-tables/) section for more details.",
"packageDefault":"dev",
"group":"Warehouse and Tracker"
},
"snowplow__events":{
"type":"string",
"title":"Events Table",
"description":"Reference to your events table",
"longDescription":"This is used internally by the packages to reference your events table based on other variable values and should not be changed.",
"packageDefault":"events",
"group":"Warehouse and Tracker"
},
"snowplow__allow_refresh":{
"type":"boolean",
"title":"Allow Refresh",
"group":"Operation and Logic",
"longDescription":"Used as the default value to return from the `allow_refresh()` macro. This macro determines whether the manifest tables can be refreshed or not, depending on your environment. See the [Manifest Tables](/docs/modeling-your-data/modeling-your-data-with-dbt/package-mechanics/manifest-tables/) section for more details.",
"packageDefault":"false"
},
"snowplow__backfill_limit_days":{
"type":"number",
"minimum":0,
"title":"Backfill Limit",
"group":"Operation and Logic",
"longDescription":"The maximum numbers of days of new data to be processed since the latest event processed. Please refer to the [incremental logic](docs/modeling-your-data/modeling-your-data-with-dbt/package-mechanics/incremental-processing/#package-state) section for more details.",
"packageDefault":"30",
"description":"The maximum numbers of days of new data to be processed since the latest event processed"
},
"snowplow__days_late_allowed":{
"type":"number",
"minimum":0,
"title":"Days Late Allowed",
"group":"Operation and Logic",
"longDescription":"The maximum allowed number of days between the event creation and it being sent to the collector. Exists to reduce lengthy table scans that can occur as a result of late arriving data.",
"packageDefault":"3",
"description":"The maximum allowed number of days between the event creation and it being sent to the collector"
},
"snowplow__lookback_window_hours":{
"type":"number",
"minimum":0,
"title":"Event Lookback Window",
"longDescription":"The number of hours to look before the latest event processed - to account for late arriving data, which comes out of order.",
"packageDefault":"6",
"group":"Operation and Logic",
"description":"The number of hours to look before the latest event processed - to account for late arriving data, which comes out of order"
},
"snowplow__start_date":{
"type":"string",
"format":"date",
"title":"Start Date",
"group":"Operation and Logic",
"longDescription":"The date to start processing events from in the package on first run or a full refresh, based on `collector_tstamp`",
"packageDefault":"2020-01-01",
"description":"The date to start processing events from in the package on first run or a full refresh, based on `collector_tstamp`"
},
"snowplow__session_timestamp":{
"type":"string",
"title":"Start Date",
"group":"Operation and Logic",
"longDescription":"This determines which timestamp is used to process sessions of data. It's a good idea to have this timestamp be the same timestamp as the field you partition your events table on.",
"packageDefault":"collector_tstamp",
"description":"Determines which timestamp is used to process sessions of data"
},
"snowplow__upsert_lookback_days":{
"type":"number",
"minimum":0,
"title":"Upsert Lookback Days",
"group":"Operation and Logic",
"longDescription":"Number of days to look back over the incremental derived tables during the upsert. Where performance is not a concern, should be set to as long a value as possible. Having too short a period can result in duplicates. Please see the [Snowplow Optimized Materialization](docs/modeling-your-data/modeling-your-data-with-dbt/package-mechanics/optimized-upserts/) section for more details.",
"packageDefault":"30",
"description":"Number of days to look back over the incremental derived tables during the upsert"
},
"snowplow__app_id":{
"type":"array",
"description":"> Click the plus sign to add a new entry",
"minItems":0,
"title":"App IDs",
"longDescription":"A list of `app_id`s to filter the events table on for processing within the package.",
"packageDefault":"[ ] (no filter applied)",
"group":"Contexts, Filters, and Logs",
"items":{
"type":"string"
}
},
"snowplow__databricks_catalog":{
"type":"string",
"title":"(Databricks) Catalog",
"warehouse":"Databricks",
"group":"Warehouse Specific",
"longDescription":"The catalogue your atomic events table is in. Depending on the use case it should either be the catalog (for Unity Catalog users from databricks connector 1.1.1 onwards, defaulted to `hive_metastore`) or the same value as your `snowplow__atomic_schema` (unless changed it should be 'atomic').",
"packageDefault":"hive_metastore",
"description":"The catalogue your atomic events table is in"
},
"snowplow__derived_tstamp_partitioned":{
"type":"boolean",
"warehouse":"Bigquery",
"title":"(Bigquery) Dervied Timestamp Partition",
"longDescription":"Boolean to enable filtering the events table on `derived_tstamp` in addition to `collector_tstamp`.",
"packageDefault":"true",
"group":"Warehouse Specific"
},
"snowplow__grant_select_to":{
"type":"array",
"description":"> Click the plus sign to add a new entry",
"minItems":0,
"items":{
"type":"string",
"title":"User/Role"
},
"title":"Grant Select List",
"group":"Warehouse and Tracker",
"longDescription":"A list of users to grant select to all tables created by this package to.",
"packageDefault":"[]"
},
"snowplow__grant_schema_usage":{
"type":"boolean",
"description":"Enable granting usage on schemas",
"title":"Enable grant usage",
"group":"Warehouse and Tracker",
"longDescription":"Enables granting usage on schemas interacted with on a dbt run",
"packageDefault":"true"
}
}
}
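
For reference, a minimal configuration object that this schema would accept. The property names come from the schema above; the values are made-up examples rather than recommended settings:

{
  "snowplow__atomic_schema": "atomic",
  "snowplow__database": "raw_db",
  "snowplow__dev_target_name": "dev",
  "snowplow__allow_refresh": false,
  "snowplow__backfill_limit_days": 30,
  "snowplow__start_date": "2022-06-01",
  "snowplow__session_timestamp": "collector_tstamp",
  "snowplow__app_id": ["website", "mobile_app"],
  "snowplow__grant_select_to": ["analytics_reader"],
  "snowplow__grant_schema_usage": true
}

Numeric variables such as snowplow__backfill_limit_days carry a "minimum": 0 constraint, and snowplow__start_date declares the "date" format, so negative day counts would fail validation and a malformed date string can be flagged if the validator enforces formats.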
