-
Notifications
You must be signed in to change notification settings - Fork 9
/
Copy path: template_pipeline.xml
executable file
·84 lines (78 loc) · 3.39 KB
/
template_pipeline.xml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
<?xml version="1.0" encoding="UTF-8" ?>
<!--
  Pipeline definition for the qwshen etl framework: consolidates user and
  event data. ${...} placeholders are presumably substituted by the engine
  at runtime from application configuration - confirm against the
  framework's variable-resolution documentation.
-->
<pipeline-def name="event-consolidation" description="This is the process for transforming event data" version="1.0.0">
<!-- Engine-level toggles. NOTE(review): names suggest one shared SparkSession
     for the whole pipeline and global views being readable as local ones;
     exact semantics live in the framework - verify there. -->
<settings>
<singleSparkSession setting="true" />
<globalViewAsLocal setting="true" />
</settings>
<!-- Actor-type aliases: a short name mapped to its implementing class.
     Further aliases are merged in from the include file. -->
<aliases include="./src/test/resources/pipelines/miscellaneous/alias.xml">
<alias name="setting" type="com.qwshen.etl.common.SparkConfActor" />
</aliases>
<!-- UDF registration: functions from UserUdf are registered with the
     "user_" name prefix; more registrations come from the include file. -->
<udf-registration include="./src/test/resources/pipelines/miscellaneous/udf-registration.xml">
<register prefix="user_" type="com.qwshen.etl.test.udf.UserUdf" />
</udf-registration>
<!-- Pipeline variables, referenced elsewhere as ${name}.
     iam_password carries a decryptionKeyString, so its resolved value is
     presumably stored encrypted - TODO confirm with the framework docs.
     staging_uri / metrics_uri point at local Windows paths; suitable for
     test runs only. -->
<variables>
<variable name="iam_password" value="${events.db.password}" decryptionKeyString="${application.security.decryption.key}" />
<variable name="process_date" value="${application.process_date}" />
<variable name="staging_uri" value="file:///c:/temp/staging" />
<variable name="metrics_uri" value="file:///c:/temp/metrics" />
</variables>
<!-- A job defined externally and included as-is. -->
<job include="./src/test/resources/pipelines/jobs/job.xml" />
<!-- Inline job with three actions: load users (csv), load raw events
     (flat file), then slice fixed-width event fields out via SQL. -->
<job name="transform-user-events">
<!-- Load CSV user records into the global view "users". The schema is given
     explicitly via ddlSchemaString because the file has no header row. -->
<action name="load-users">
<actor type="file">
<properties>
<format>csv</format>
<options>
<header>false</header>
<delimiter>,</delimiter>
<quote>"</quote>
<timestampFormat>yyyy/MM/dd HH:mm:ss</timestampFormat>
</options>
<ddlSchemaString>user_id long, birth_year int, gender string, location string</ddlSchemaString>
<fileUri>${events.users_input}</fileUri>
</properties>
</actor>
<output-view name="users" global="true" />
</action>
<!-- Load the raw event file as unparsed lines into the job-local view
     "events_raw". NOTE(review): the "flat" actor presumably exposes each
     line as row_value with a row_no counter (both are referenced by the
     SQL below) - confirm against the actor's documentation. -->
<action name="load-events">
<actor type="flat">
<properties>
<fileUri>${events.events_input}</fileUri>
</properties>
</actor>
<output-view name="events_raw" global="false" />
</action>
<!-- Parse fixed-width records: chars 1-12 event_id, 13-28 event_time,
     29-40 event_host, 41-104 event_location. Rows 1 and 2 are skipped
     (presumably header lines) and records whose chars 6-10 read 'TFYKR'
     are dropped - NOTE(review): meaning of that marker is not visible
     here; confirm with the data spec. Result is the global view "events". -->
<action name="transform-events">
<actor type="sql">
<properties>
<sqlString>
select
substr(row_value, 1, 12) as event_id,
substr(row_value, 13, 16) as event_time,
substr(row_value, 29, 12) as event_host,
substr(row_value, 41, 64) as event_location
from events_raw
where row_no not in (1, 2) and substr(row_value, 6, 5) != 'TFYKR'
</sqlString>
</properties>
</actor>
<input-views>
<view name="events_raw" />
</input-views>
<output-view name="events" global="true" />
</action>
</job>
<!-- Write run metrics for the listed actions to metrics_uri. -->
<metrics-logging enabled="true">
<uri>${metrics_uri}</uri>
<actions>
<action name="load-events" />
</actions>
</metrics-logging>
<!-- Dump intermediate output of the listed actions to staging_uri for
     debugging; presumably disabled in production runs. -->
<debug-staging enabled="true">
<uri>${staging_uri}</uri>
<actions>
<action name="transform-events" />
<action name="load-events" />
</actions>
</debug-staging>
</pipeline-def>