ETL from SCP/XML to MENTDB/CSV.mql
ETL script compatible with the 'supervision' application: XML files are pulled from the source over SCP, transformed into CSV, and loaded into the destination through a MentDB tunnel.
if (not (group exist "0001_folder1_folder2_folder3")) {
	group add "0001_folder1_folder2_folder3";
};

script merge "folder1.folder2.folder3.step_1_source_connect.exe" true 5
  (param
  )
  "Connect to the source"
{

	# CONFIGURATION ;
	-> "[CONF_NAME_OF_THE_FLOW]" "name_of_the_flow";
	-> "[CONF_SOURCE_CM]" "demo_cm";

	# INITIALIZATION ;
	-> "[FLOW_PID]" [PID];
	
	# HANDLE ;
	try {
	
		# Flow initialization;
		stack flow_init [FLOW_PID] [CONF_NAME_OF_THE_FLOW] "{}";
		stack flow_step [FLOW_PID] 1 "source_connect...";

		ssh connect "session_remote" {cm get [CONF_SOURCE_CM];};
		
		#Step 1 is valid;
		stack flow_step [FLOW_PID] 1 "source_connect_ok";
		
		include "folder1.folder2.folder3.step_2_extract.exe";
	
	} {

		try {ssh disconnect "session_remote";} {} "[err]";

		#Step 1 is not valid;
		stack flow_step [FLOW_PID] 1 "source_connect_KO";
		
		# Give the error to the stack and stop the process;
		exception (1) ([global_err]);
	
	} "[global_err]";
	
} "Return nothing";
if (not (group is granted script "folder1.folder2.folder3.step_1_source_connect.exe" "0001_folder1_folder2_folder3")) {
	group grant script "folder1.folder2.folder3.step_1_source_connect.exe" "0001_folder1_folder2_folder3";
};
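
The whole chain is started from its first step; the later steps are then reached either through include (same process, same session) or through parameterized stack calls, exactly as the scripts above do between themselves. A minimal way to launch the flow, assuming the five scripts have been merged as listed:

# Run the flow immediately in the current process;
include "folder1.folder2.folder3.step_1_source_connect.exe";

# Deferred form, assuming the stack call also accepts a script without parameters ;
# (same pattern as the parameterized calls pushed in steps 2 and 3) ;
stack (date now) "folder1.folder2.folder3.step_1_source_connect.exe";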

script merge "folder1.folder2.folder3.step_2_extract.exe" true 1
  (param
  )
  "Extract files from the source"
{

	# CONFIGURATION ;
	-> "[CONF_DIR_SOURCE]" "tmp/source";
	-> "[CONF_SOURCE_FILTER]" "*xml";
	
	# HANDLE ;
	try {

		# Mark the flow as step 2;
		stack flow_step [FLOW_PID] 2 "extract...";

		# Create the local PID directory;
		file mkdir (concat "home/" [FLOW_PID]);

		# Get the files into the PID directory;
		log write (concat "Get files from '" [CONF_DIR_SOURCE] "/" [CONF_SOURCE_FILTER] "'.") OK null null;
		
		json load "valid_files" "[]";
		file create "tmp/0001_folder1_folder2_folder3.csv" (concat "FILENAME" [_n_] (ssh execute_1_cmd "session_remote" (concat "ls " [CONF_DIR_SOURCE] "/" [CONF_SOURCE_FILTER])));
		csv parse "T" "tmp/0001_folder1_folder2_folder3.csv" "," "'" "FILENAME" {
			json load "row" "{}";
			json iobject "row" / "name" [T_FILENAME] STR;
			json iarray "valid_files" / (json doc "row") OBJ;
		};
		
		json parse_array "valid_files" "/" "obj" {

			-> "[filename]" (json select "obj" /name);
			ssh scp from "session_remote" (concat [CONF_DIR_SOURCE] "/" [filename]) (concat "home/" [FLOW_PID] "/" [filename]);
			log write (concat "Get file 'home/" [FLOW_PID] "/" [filename] "'.") OK null null;
		
		};

		# Delete the source files;
		json parse_array "valid_files" "/" "obj" {

			-> "[filename]" (json select "obj" /name);
			ssh execute_1_cmd "session_remote" (concat "rm " [CONF_DIR_SOURCE] "/" [filename]);
			log write (concat "Remove source file '" [CONF_DIR_SOURCE] "/" [filename] "'.") OK null null;

		};

		file delete "tmp/0001_folder1_folder2_folder3.csv";
		ssh disconnect "session_remote";
	
	} {

		try {file delete "tmp/0001_folder1_folder2_folder3.csv";} {} "[err]";
		try {ssh disconnect "session_remote";} {} "[err]";

		#Step 2 is not valid;
		stack flow_step [FLOW_PID] 2 "extract_KO";
		
		# Give the error to the stack and stop the process;
		exception (1) ([global_err]);
	
	} "[global_err]";
	
	log write (concat (json count "valid_files" /) " file(s) to transform.") OK null null;

	if (> (json count "valid_files" /) 0) {

		#Step 2 is valid;
		stack flow_step [FLOW_PID] 2 (concat "extract_ok_" (json count "valid_files" /) "_files");
		
		json parse_array "valid_files" "/" "obj" {

			-> "[filename]" (json select "obj" /name);
		
			stack (date now) "folder1.folder2.folder3.step_3_transform.exe" "[FLOW_PID]" [FLOW_PID] "[filename]" [filename];
	
		};
		
	} {

		#Step 2 is valid;
		stack flow_step [FLOW_PID] 2 "extract_ok_zero_file";
		
		# No file in the source directory;
		file delete (concat "home/" [FLOW_PID]);

	};

} "Return nothing";
if (not (group is granted script "folder1.folder2.folder3.step_2_extract.exe" "0001_folder1_folder2_folder3")) {
	group grant script "folder1.folder2.folder3.step_2_extract.exe" "0001_folder1_folder2_folder3";
};
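
For reference, the temporary listing built in step 2 is just the raw ls output prefixed with a FILENAME header; the csv parse loop then turns it into the valid_files JSON array. With two hypothetical source files, the intermediate data would look like this:

	# Content of tmp/0001_folder1_folder2_folder3.csv ;
	FILENAME
	order_20240101.xml
	order_20240102.xml

	# valid_files after the csv parse loop ;
	[{"name":"order_20240101.xml"},{"name":"order_20240102.xml"}]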

script merge "folder1.folder2.folder3.step_3_transform.exe" true 1
  (param
  	(var "[FLOW_PID]" {true} "The flow PID" is_null:true is_empty:true "1")
  	(var "[filename]" {true} "The filename" is_null:true is_empty:true "file.xxx")
  )
  "Transform the file"
{

	# CONFIGURATION ;
	-> "[CONF_DESTINATION_NEW_FILENAME]" (concat "new_filename_" (date systimestamp_min) ".csv");
	-> "[CONF_DESTINATION_CSV_SEPARATOR]" ";";
	-> "[CONF_DESTINATION_CSV_QUOTE]" "\"";
	-> "[CONF_DESTINATION_CSV_FORCE_COLUMN]" "D;E;F";
	
	# HANDLE ;
	try {

		# Mark the flow as step 3;
		stack flow_step [FLOW_PID] 3 "transform...";

		file writer_open "w1" (concat "home/" [FLOW_PID] "/" [CONF_DESTINATION_NEW_FILENAME]) true TEXT "utf-8";
		file writer_add_line "w1" [CONF_DESTINATION_CSV_FORCE_COLUMN];
		file writer_flush "w1";

		log write (concat "Build the file 'home/" [FLOW_PID] "/" [CONF_DESTINATION_NEW_FILENAME] "' from 'home/" [FLOW_PID] "/" [filename] "'...") OK null null;

		xml load "flow_source" (file load (concat "home/" [FLOW_PID] "/" [filename]));
		
		# BEGIN METHOD 1;
		-> "[T_A]" (xml select text "flow_source" "/data/A");
		-> "[T_B]" (xml select text "flow_source" "/data/B");
		-> "[T_C]" (xml select text "flow_source" "/data/C");

		-> "[line]" "";
		concat_var "[line]" [_n_];
		concat_var "[line]" (string csv_value [T_A] [CONF_DESTINATION_CSV_SEPARATOR] [CONF_DESTINATION_CSV_QUOTE]) [CONF_DESTINATION_CSV_SEPARATOR];
		concat_var "[line]" (string csv_value [T_B] [CONF_DESTINATION_CSV_SEPARATOR] [CONF_DESTINATION_CSV_QUOTE]) [CONF_DESTINATION_CSV_SEPARATOR];
		concat_var "[line]" (string csv_value [T_C] [CONF_DESTINATION_CSV_SEPARATOR] [CONF_DESTINATION_CSV_QUOTE]);
		file writer_add_line "w1" [line];
		file writer_flush "w1";
		# END METHOD 1;
		
		# BEGIN METHOD 2;
		-> "[nb_rows]" (xml count "flow_source" "/data/rows/item");
		for (-> "[i_xml]" 1) (<= [i_xml] [nb_rows]) (++ "[i_xml]") {
		
			-> "[T_A]" (xml select text "flow_source" (concat "/data/rows/item[" [i_xml] "]/A"));
			-> "[T_B]" (xml select text "flow_source" (concat "/data/rows/item[" [i_xml] "]/B"));
			-> "[T_C]" (xml select text "flow_source" (concat "/data/rows/item[" [i_xml] "]/C"));

			-> "[line]" "";
			concat_var "[line]" [_n_];
			concat_var "[line]" (string csv_value [T_A] [CONF_DESTINATION_CSV_SEPARATOR] [CONF_DESTINATION_CSV_QUOTE]) [CONF_DESTINATION_CSV_SEPARATOR];
			concat_var "[line]" (string csv_value [T_B] [CONF_DESTINATION_CSV_SEPARATOR] [CONF_DESTINATION_CSV_QUOTE]) [CONF_DESTINATION_CSV_SEPARATOR];
			concat_var "[line]" (string csv_value [T_C] [CONF_DESTINATION_CSV_SEPARATOR] [CONF_DESTINATION_CSV_QUOTE]);
			file writer_add_line "w1" [line];
			file writer_flush "w1";
		
		};
		# END METHOD 2;

		file writer_close "w1";

		log write "Built." OK null null;

		#Step 3 is valid;
		stack flow_step [FLOW_PID] 3 "transform_ok";
		
		stack (date now) "folder1.folder2.folder3.step_4_destination_connect.exe" "[FLOW_PID]" [FLOW_PID] "[filename]" [CONF_DESTINATION_NEW_FILENAME];
	
	} {

		try {file writer_close "w1";} {} "[err]";

		#Step 3 is not valid;
		stack flow_step [FLOW_PID] 3 "transform_KO";
		
		# Give the error to the stack and stop the process;
		exception (1) ([global_err]);
	
	} "[global_err]";

} "Return nothing";
if (not (group is granted script "folder1.folder2.folder3.step_3_transform.exe" "0001_folder1_folder2_folder3")) {
	group grant script "folder1.folder2.folder3.step_3_transform.exe" "0001_folder1_folder2_folder3";
};
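
Step 3 expects a source document exposing one record directly under /data (method 1) and a repeating list under /data/rows/item (method 2); each record becomes one ;-separated, "-quoted line under the forced header D;E;F, string csv_value being applied so that values containing the separator or the quote are written safely. A hypothetical input and the matching output, in logical form, assuming both methods are kept:

	# Input: home/<FLOW_PID>/<filename> ;
	<data>
		<A>a0</A>
		<B>b0</B>
		<C>c0</C>
		<rows>
			<item><A>a1</A><B>b1</B><C>c1</C></item>
			<item><A>a2</A><B>b2</B><C>c2</C></item>
		</rows>
	</data>

	# Output: home/<FLOW_PID>/new_filename_<timestamp>.csv ;
	D;E;F
	a0;b0;c0
	a1;b1;c1
	a2;b2;c2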

script merge "folder1.folder2.folder3.step_4_destination_connect.exe" true 5
  (param
  	(var "[FLOW_PID]" {true} "The flow PID" is_null:true is_empty:true "1")
  	(var "[filename]" {true} "The filename" is_null:true is_empty:true "file.xxx")
  )
  "Connect to the destination"
{

	# CONFIGURATION ;
	-> "[CONF_DESTINATION_CM]" "demo_cm";
	
	# HANDLE ;
	try {
	
		# Mark the flow as step 4;
		stack flow_step [FLOW_PID] 4 "destination_connect...";

		tunnel connect "destination_sess" {cm get [CONF_DESTINATION_CM];};
		
		#Step 4 is valid;
		stack flow_step [FLOW_PID] 4 "destination_connect_ok";
		
		include "folder1.folder2.folder3.step_5_load.exe";
	
	} {

		try {tunnel disconnect "destination_sess";} {} "[err]";

		#Step 4 is not valid;
		stack flow_step [FLOW_PID] 4 "destination_connect_KO";
		
		# Give the error to the stack and stop the process;
		exception (1) ([global_err]);
	
	} "[global_err]";
	
} "Return nothing";
if (not (group is granted script "folder1.folder2.folder3.step_4_destination_connect.exe" "0001_folder1_folder2_folder3")) {
	group grant script "folder1.folder2.folder3.step_4_destination_connect.exe" "0001_folder1_folder2_folder3";
};
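
Because steps 3 and 4 take their inputs as parameters, a failed file can be replayed from the supervision side by pushing the same call back onto the stack. A hypothetical replay of step 4, for flow 123 and an already transformed file (both values are illustrative; the real ones come from the stack entry of the failed run):

stack (date now) "folder1.folder2.folder3.step_4_destination_connect.exe" "[FLOW_PID]" "123" "[filename]" "new_filename_202401011200.csv";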

script merge "folder1.folder2.folder3.step_5_load.exe" true 1
  (param
  )
  "Load file to the destination"
{

	# CONFIGURATION ;
	-> "[CONF_DIR_DESTINATION]" "tmp/destination";
	
	# HANDLE ;
	try {

		# Mark the flow as step 5;
		stack flow_step [FLOW_PID] 5 "load...";
		
		log write (concat "Send '" (concat "home/" [FLOW_PID] "/" [filename]) "' to the '" (concat [CONF_DIR_DESTINATION] "/" [filename]) "' ...") OK null null;

		# Copy file into the destination directory;
		#Open a reader;
		file reader_open "r1" (concat "home/" [FLOW_PID] "/" [filename]) BINARY null;

		tunnel execute "destination_sess" (concat 
			"-> \"[remote_destination]\" \"" (mql encode (concat [CONF_DIR_DESTINATION] "/" [filename])) "\";"
			(mql {
				file writer_open "w1" [remote_destination] true BINARY null;
			})
		);
	
		#Parse the file;
		while (is not null (-> "[bytes]" (file reader_get_bytes "r1" 402400));) {
	
			tunnel execute "destination_sess" (concat 
				"-> \"[bytes]\" \"" (mql encode [bytes]) "\";"
				(mql {
					file writer_add_bytes "w1" [bytes];
					file writer_flush "w1";
				})
			);
	
		};

		tunnel execute "destination_sess" (concat 
			(mql {
				#Force to write;
				file writer_flush "w1";
				file writer_close "w1";
			})
		);
	
		#Close the reader;
		file reader_close "r1";
		
		log write "Sent." OK null null;

		#Step 5 is valid;
		stack flow_step [FLOW_PID] 5 "load_ok";

		try {tunnel disconnect "destination_sess";} {} "[err]";
		
	} {

		#Close objects;
		try {file reader_close "r1";} {} "[sub_err]";
		try {tunnel execute "destination_sess" (concat 
			(mql {
				try {file writer_close "w1";} {} "[err]";
			})
		);} {} "[err]";
		try {tunnel disconnect "destination_sess";} {} "[err]";

		#Step 5 is not valid;
		stack flow_step [FLOW_PID] 5 "load_KO";
		
		# Give the error to the stack and stop the process;
		exception (1) ([global_err]);
	
	} "[global_err]";

} "Return nothing";
if (not (group is granted script "folder1.folder2.folder3.step_5_load.exe" "0001_folder1_folder2_folder3")) {
	group grant script "folder1.folder2.folder3.step_5_load.exe" "0001_folder1_folder2_folder3";
};
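
For the 'supervision' application, a run is traced through the flow_step statuses pushed above. A successful run over two extracted files leaves a trail of this shape (the status strings are the ones used in the scripts; the file count and the per-file repetition are illustrative):

	step 1  source_connect...        -> source_connect_ok
	step 2  extract...               -> extract_ok_2_files
	step 3  transform...             -> transform_ok              (once per file)
	step 4  destination_connect...   -> destination_connect_ok    (once per file)
	step 5  load...                  -> load_ok                   (once per file)

On failure, a step writes its *_KO status instead and rethrows the error to the stack through exception.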
