wrangler ui features
AnkitCLI committed Nov 4, 2024
1 parent aeae23d commit c399e9b
Showing 8 changed files with 57 additions and 11 deletions.
@@ -31,8 +31,8 @@ Feature: datatype parsers
Then Select connection data row with name: "dataset"
Then Select connection data row with name: "bqSourceTable"
Then Verify connection datatable is displayed for the data: "bqSourceTable"
- Then Expand dropdown column: "create_date" and apply directive: "Parse" as "Simple date" with: "yyyy-MM-dd" option
- Then Expand dropdown column: "update_date" and apply directive: "Parse" as "Simple date" with: "yyyy-MM-dd" option
+ Then Expand dropdown column: "update_date" and apply directive: "Parse" as "SIMPLEDATE" with: "yyyy-MM-dd" option
+ Then Expand dropdown column: "create_date" and apply directive: "Parse" as "SIMPLEDATE" with: "yyyy-MM-dd" option
Then Enter directive from CLI "parse-timestamp :time"
Then Enter directive from CLI "parse-as-currency :price :newprice"
Then Enter directive from CLI "format-as-currency :newprice :format_price"
@@ -85,7 +85,7 @@ Feature: datatype parsers
Then Select connection data row with name: "dataset"
Then Select connection data row with name: "bqSourceTable"
Then Verify connection datatable is displayed for the data: "bqSourceTable"
- Then Expand dropdown column: "timestamp" and apply directive: "Parse" with directive type: "Datetime" and select: "Custom format" and enter: "yyyy-MM-dd'T'HH:mm:ssX'['z']'"
+ Then Expand dropdown column: "timestamp" and apply directive: "Parse" with directive type: "DATETIME" and select: "Custom_Format" and enter: "yyyy-MM-dd'T'HH:mm:ssX'['z']'"
Then Enter directive from CLI "current-datetime :create_date"
Then Enter directive from CLI "datetime-to-timestamp :timestamp"
Then Enter directive from CLI "format-datetime :create_date 'y'"
@@ -31,7 +31,7 @@ Feature: parse as fixed length
Then Select connection data row with name: "dataset"
Then Select connection data row with name: "bqSourceTable"
Then Verify connection datatable is displayed for the data: "bqSourceTable"
- Then Expand dropdown column: "fixedlength" and apply directive: "Parse" as "Fixed length" with: "2,4,5,3" option
+ Then Expand dropdown column: "fixedlength" and apply directive: "Parse" as "FIXEDLENGTH" with: "2,4,5,3" option
Then Enter directive from CLI "split-url url"
Then Enter directive from CLI "write-as-csv :url_protocol"
Then Enter directive from CLI "url-encode :url"
@@ -36,8 +36,8 @@ Feature: parse as HL7
Then Enter directive from CLI "set-type :Body string"
Then Enter directive from CLI "keep address,Body,Body_hl7_MSH_12,Body_hl7_MSH_9_1"
Then Expand dropdown column: "address" and apply directive: "FindAndReplace" and select: "address1" and enter: "test"
- Then Expand dropdown column: "Body_hl7_MSH_9_1" and apply directive: "Maskdata" as "By shuffling"
- Then Expand dropdown column: "address" and apply directive: "Sendtoerror" as "value is empty"
+ Then Expand dropdown column: "Body_hl7_MSH_9_1" and apply directive: "MaskData" as "By_shuffling"
+ Then Expand dropdown column: "address" and apply directive: "SendToError" as "value_is_empty"
Then Enter directive from CLI "rename :Body_hl7_MSH_12 :id "
Then Click Create Pipeline button and choose the type of pipeline as: "Batch pipeline"
Then Verify plugin: "BigQueryTable" node is displayed on the canvas with a timeout of 120 seconds
@@ -32,7 +32,7 @@ Feature: parse as Json
Then Select connection data row with name: "bqSourceTable"
Then Verify connection datatable is displayed for the data: "bqSourceTable"
Then Expand dropdown column: "json" and apply directive: "Parse" as "JSON" with: "1" option
- Then Expand dropdown column: "body" and apply directive: "Format" as "Trim leading whitespace"
+ Then Expand dropdown column: "body" and apply directive: "Format" as "Trim_leading_whitespace"
Then Enter directive from CLI "set-column :desc concat(json_pet,body)"
Then Expand dropdown column: "json_name" and apply directive: "CopyColumn" as "copied"
Then Select checkbox on two columns: "json_id" and "json_age"
@@ -31,7 +31,7 @@ Feature: Wrangler - Run time scenarios for Parse Log
Then Select connection data row with name: "dataset"
Then Select connection data row with name: "bqSourceTable"
Then Verify connection datatable is displayed for the data: "bqSourceTable"
- Then Expand dropdown column: "body" and apply directive: "Parse" as "Log" with: "Common" option
+ Then Expand dropdown column: "body" and apply directive: "Parse" as "LOG" with: "Common" option
Then Expand dropdown column: "number_connection_client_logname_last" and apply directive: "DeleteColumn"
Then Expand dropdown column: "number_connection_client_logname" and apply directive: "DeleteColumn"
Then Expand dropdown column: "http_querystring_request_firstline_uri_query" and apply directive: "DeleteColumn"
@@ -31,7 +31,7 @@ Feature: parse as XmlToJson
Then Select connection data row with name: "dataset"
Then Select connection data row with name: "bqSourceTable"
Then Verify connection datatable is displayed for the data: "bqSourceTable"
- Then Expand dropdown column: "xmldata" and apply directive: "Parse" as "XML to JSON" with: "1" option
+ Then Expand dropdown column: "xmldata" and apply directive: "Parse" as "XMLTOJSON" with: "1" option
Then Enter directive from CLI "split-email :email"
Then Enter directive from CLI "text-distance block email email_account distance"
Then Enter directive from CLI "text-metric longest-common-subsequence email email_account distance2"
@@ -1,10 +1,10 @@
ChangeDataType=changeDataType
- Sendtoerror=markAsError
+ SendToError=markAsError
SwapTwoColumnNames=swapColumns
DeleteColumn=dropColumn
Hash=hash
Parse=parse
- Maskdata=maskData
+ MaskData=maskData
FindAndReplace=findAndReplace
Format=format
Calculate=calculate
@@ -17,3 +17,48 @@ ExtractFields=extractFields
Explode=explode
DefineVariable=defineVariable
SetCounter=setCounter
+ Concatenate=CONCATENATE
+ Always=ALWAYS
+ Integer=integer
+ Comma=COMMA
+ Common=COMMON
+ Tab=TAB
+ Space=SPACE
+ Pipe=PIPE
+ CONTROL_A=^A
+ CONTROL_D=^D
+ CSV=csv
+ Avro=AVRO
+ EXCEL=excel
+ JSON=singleField
+ XMLTOJSON=xmlToJson
+ LOG=log
+ SIMPLEDATE=dateFormats
+ DATETIME=dateFormats
+ FIXEDLENGTH=singleField
+ HL7=HL7
+ Decimal=decimal
+ lowercase=LOWERCASE
+ Trim_Whitespace=TRIM_WHITESPACE
+ Character_count=CHARCOUNT
+ Using_patterns=patterns
+ Using_delimiters=delimiters
+ Delimited_text=delimited
+ Array_(by_flattening)=arrayFlattening
+ Record_(by_flattening)=recordFlattening
+ Show_last_4_characters_only=last4Chars
+ Show_last_2_characters_only=last2Chars
+ Custom_selection=customSelection
+ By_shuffling=shuffling
+ value_is_empty=EMPTY
+ value_is=TEXTEXACTLY
+ value_contains=TEXTCONTAINS
+ value_starts_with=TEXTSTARTSWITH
+ Trim_leading_whitespace=TRIM_LEADING_WHITESPACE
+ Custom_Format=CUSTOM
+ yyyy-MM-dd=OPTION5
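
The entries added above map the option keys quoted in the feature steps (for example "SIMPLEDATE", "By_shuffling", "value_is_empty") to the identifiers the Wrangler UI exposes. As a rough illustration of how a step definition might consume such a mapping, here is a minimal Java sketch; the class name OptionMapping and the resource name optionMapping.properties are assumptions for illustration, not the repository's actual helper or file name.

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

// Minimal sketch: resolve an option key used in a feature step, e.g. "SIMPLEDATE",
// to the value the UI layer expects, e.g. "dateFormats".
public final class OptionMapping {
  private static final Properties MAP = new Properties();

  static {
    // Hypothetical resource name; the real mapping lives in a .properties file
    // like the one shown in the hunk above.
    try (InputStream in = OptionMapping.class.getClassLoader()
        .getResourceAsStream("optionMapping.properties")) {
      if (in != null) {
        MAP.load(in);
      }
    } catch (IOException e) {
      throw new ExceptionInInitializerError(e);
    }
  }

  private OptionMapping() {
  }

  // Returns the UI-facing value for a feature-file key, e.g. "SIMPLEDATE" -> "dateFormats".
  public static String resolve(String key) {
    String value = MAP.getProperty(key);
    if (value == null) {
      throw new IllegalArgumentException("No mapping defined for option key: " + key);
    }
    return value;
  }
}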





@@ -46,6 +46,7 @@ dataset=Wrangler_Test
dataset2=Wrangler
#expectedBQFiles
ExpectedDirective_GroupBy=BQValidationExpectedFiles/Directive_wrangler_GroupBy
+ filterEmptyProperty=value is empty

ExpectedDirective_parse_FixedLength=BQValidationExpectedFiles/Directive_parse_fixedlength
ExpectedDirective_parse_hl7=BQValidationExpectedFiles/Directive_parse_hl7
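
The new filterEmptyProperty entry presumably supplies the literal "value is empty" label at runtime, matching the SendToError option exercised in the HL7 scenario. Below is a minimal sketch of how a step could read it, assuming the cdap-e2e-framework's PluginPropertyUtils helper; the step wording and class name are hypothetical, not an existing step in this repository.

import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cucumber.java.en.Then;

// Hypothetical step definition, for illustration only.
public class WranglerFilterSteps {

  @Then("Apply send-to-error filter using the {string} plugin property")
  public void applySendToErrorFilter(String propertyKey) {
    // Resolves e.g. "filterEmptyProperty" -> "value is empty" from the plugin parameters file.
    String filterCondition = PluginPropertyUtils.pluginProp(propertyKey);
    // The resolved label would then drive the existing SendToError UI action (omitted here).
    System.out.println("Applying send-to-error filter: " + filterCondition);
  }
}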
