diff --git a/custom-recipes/pi-system-af-tree/recipe.py b/custom-recipes/pi-system-af-tree/recipe.py
index c677ac2..ed4c7fd 100644
--- a/custom-recipes/pi-system-af-tree/recipe.py
+++ b/custom-recipes/pi-system-af-tree/recipe.py
@@ -1,22 +1,15 @@
-# -*- coding: utf-8 -*-
import dataiku
from dataiku.customrecipe import get_input_names_for_role, get_recipe_config, get_output_names_for_role
-import pandas as pd
from safe_logger import SafeLogger
from osisoft_plugin_common import (
- get_credentials, get_interpolated_parameters, normalize_af_path,
- get_combined_description, get_base_for_data_type, check_debug_mode,
- PerformanceTimer, get_max_count, check_must_convert_object_to_string,
- convert_schema_objects_to_string, get_summary_parameters, get_advanced_parameters,
- get_batch_parameters
+ get_credentials, PerformanceTimer
)
-from osisoft_client import OSIsoftClient
from osisoft_constants import OSIsoftConstants
logger = SafeLogger("pi-system plugin", forbiden_keys=["token", "password"])
-logger.info("PIWebAPI Assets values downloader recipe v{}".format(
+logger.info("PIWebAPI AF selector recipe v{}".format(
OSIsoftConstants.PLUGIN_VERSION
))
@@ -30,182 +23,48 @@ def get_step_value(item):
return None
+def next_tree_item(tree_data):
+ if not isinstance(tree_data, list):
+ return
+ for item in tree_data:
+ children = item.pop("children", [])
+ if children:
+ for child in next_tree_item(children):
+ yield child
+ yield item
+
+
input_dataset = get_input_names_for_role('input_dataset')
output_names_stats = get_output_names_for_role('api_output')
config = get_recipe_config()
-print("ALX:config={}".format(config))
-dku_flow_variables = dataiku.get_flow_variables()
+tree_data = config.get("treeData", [])
logger.info("Initialization with config config={}".format(logger.filter_secrets(config)))
auth_type, username, password, server_url, is_ssl_check_disabled = get_credentials(config)
-is_debug_mode = check_debug_mode(config)
-max_count = get_max_count(config)
-summary_type = config.get("summary_type")
-must_convert_object_to_string = check_must_convert_object_to_string(config)
-
-use_server_url_column = config.get("use_server_url_column", False)
-if not server_url and not use_server_url_column:
- raise ValueError("Server domain not set")
-
-path_column = config.get("path_column", "")
-if not path_column:
- raise ValueError("There is no parameter column selected.")
-
-data_type = config.get("data_type")
-start_time = config.get("start_time")
-end_time = config.get("end_time")
-use_start_time_column = config.get("use_start_time_column", False)
-start_time_column = config.get("start_time_column")
-use_end_time_column = config.get("use_end_time_column", False)
-end_time_column = config.get("end_time_column")
-server_url_column = config.get("server_url_column")
-use_batch_mode, batch_size = get_advanced_parameters(config)
-interval, sync_time, boundary_type = get_interpolated_parameters(config)
-record_boundary_type = config.get("record_boundary_type") if data_type == "RecordedData" else None
-summary_type, summary_duration = get_summary_parameters(config)
-do_duplicate_input_row = config.get("do_duplicate_input_row", False)
-max_request_size, estimated_density, maximum_points_returned = get_batch_parameters(config)
-max_time_to_retrieve_per_batch = estimated_density / maximum_points_returned #density per hour <- max time is in hour
network_timer = PerformanceTimer()
processing_timer = PerformanceTimer()
processing_timer.start()
-input_parameters_dataset = dataiku.Dataset(input_dataset[0])
output_dataset = dataiku.Dataset(output_names_stats[0])
-input_parameters_dataframe = input_parameters_dataset.get_dataframe()
-
-results = []
-time_last_request = None
-client = None
-previous_server_url = ""
-time_not_parsed = True
-
-input_columns = list(input_parameters_dataframe.columns) if do_duplicate_input_row else []
-
+schema = [
+ {'name': 'title', 'type': 'string'},
+ {'name': 'template_name', 'type': 'string'},
+ {'name': 'category_names', 'type': 'array'},
+ {'name': 'path', 'type': 'string'},
+ {'name': 'id', 'type': 'string'},
+ {'name': 'url', 'type': 'string'},
+ {'name': 'checked', 'type': 'boolean'},
+ {'name': 'expanded', 'type': 'boolean'},
+]
+output_dataset.write_schema(schema)
+
+selectedAttributes = config.get("selectedAttributes", [])
with output_dataset.get_writer() as writer:
- first_dataframe = True
- absolute_index = 0
- batch_buffer_size = 0
- buffer = []
- for index, input_parameters_row in input_parameters_dataframe.iterrows():
- absolute_index += 1
- server_url = input_parameters_row.get(server_url_column, server_url) if use_server_url_column else server_url
- start_time = input_parameters_row.get(start_time_column, start_time) if use_start_time_column else start_time
- end_time = input_parameters_row.get(end_time_column, end_time) if use_end_time_column else end_time
- row_name = input_parameters_row.get("Name")
- duplicate_initial_row = {}
- nb_rows_to_process = input_parameters_dataframe.shape[0]
- for input_column in input_columns:
- duplicate_initial_row[input_column] = input_parameters_row.get(input_column)
-
- if client is None or previous_server_url != server_url:
- client = OSIsoftClient(
- server_url, auth_type, username, password,
- is_ssl_check_disabled=is_ssl_check_disabled,
- is_debug_mode=is_debug_mode, network_timer=network_timer
- )
- previous_server_url = server_url
- if time_not_parsed:
- # make sure all OSIsoft time string format are evaluated at the same time
- # rather than at every request, at least for start / end times set in the UI
- time_not_parsed = False
- start_time = client.parse_pi_time(start_time)
- end_time = client.parse_pi_time(end_time)
- sync_time = client.parse_pi_time(sync_time)
-
- object_id = input_parameters_row.get(path_column)
- item = None
- if client.is_resource_path(object_id):
- object_id = normalize_af_path(object_id)
- item = client.get_item_from_path(object_id)
- step_value = get_step_value(item)
- if item:
- rows = client.recursive_get_rows_from_item(
- item,
- data_type,
- start_date=start_time,
- end_date=end_time,
- interval=interval,
- sync_time=sync_time,
- boundary_type=boundary_type,
- record_boundary_type=record_boundary_type,
- max_count=max_count,
- can_raise=False,
- object_id=object_id,
- summary_type=summary_type,
- summary_duration=summary_duration
- )
- elif use_batch_mode:
- buffer.append({"WebId": object_id})
- batch_buffer_size += 1
- if (batch_buffer_size >= batch_size) or (absolute_index == nb_rows_to_process):
- rows = client.get_rows_from_webids(
- buffer, data_type, max_count=max_count,
- start_date=start_time,
- end_date=end_time,
- interval=interval,
- sync_time=sync_time,
- boundary_type=boundary_type,
- record_boundary_type=record_boundary_type,
- can_raise=False,
- batch_size=batch_size,
- object_id=object_id,
- summary_type=summary_type,
- summary_duration=summary_duration,
- endpoint_type="AF",
- estimated_density=estimated_density,
- maximum_points_returned=maximum_points_returned
- )
- batch_buffer_size = 0
- buffer = []
- else:
- continue
- else:
- rows = client.recursive_get_rows_from_webid(
- object_id,
- data_type,
- start_date=start_time,
- end_date=end_time,
- interval=interval,
- sync_time=sync_time,
- boundary_type=boundary_type,
- record_boundary_type=record_boundary_type,
- max_count=max_count,
- can_raise=False,
- endpoint_type="AF",
- summary_type=summary_type,
- summary_duration=summary_duration
- )
- for row in rows:
- row["Name"] = row_name
- row[path_column] = object_id
- if isinstance(row, list):
- for line in row:
- base = get_base_for_data_type(data_type, object_id, Step=step_value)
- base.update(line)
- extention = client.unnest_row(base)
- results.extend(extention)
- else:
- base = get_base_for_data_type(data_type, object_id, Step=step_value)
- if duplicate_initial_row:
- base.update(duplicate_initial_row)
- base.update(row)
- extention = client.unnest_row(base)
- results.extend(extention)
-
- unnested_items_rows = pd.DataFrame(results)
- if first_dataframe:
- default_columns = OSIsoftConstants.RECIPE_SCHEMA_PER_DATA_TYPE.get(data_type)
- if must_convert_object_to_string:
- default_columns = convert_schema_objects_to_string(default_columns)
- combined_columns_description = get_combined_description(default_columns, unnested_items_rows)
- output_dataset.write_schema(combined_columns_description)
- first_dataframe = False
- if not unnested_items_rows.empty:
- writer.write_dataframe(unnested_items_rows)
- results = []
+    for item in selectedAttributes:
+ if item.get("checked", True) is True:
+ writer.write_row_dict(item)
processing_timer.stop()
logger.info("Overall timer:{}".format(processing_timer.get_report()))
diff --git a/js/pi-system_treecontroller.js b/js/pi-system_treecontroller.js
index 7507d1e..09c97d8 100644
--- a/js/pi-system_treecontroller.js
+++ b/js/pi-system_treecontroller.js
@@ -14,41 +14,6 @@ app.service('TreeDataService', function() {
};
});
-app.controller('TreeCtrl', ['$scope', '$http','CreateModalFromTemplate', 'TreeDataService', function($scope, $http, CreateModalFromTemplate, TreeDataService) {
-$scope.init = function() {
- $http.get('/plugins/pi-system/resource/tree.json')
- .then(function(response) {
- TreeDataService.setTreeData(response.data);
- $scope.treeData = TreeDataService.getTreeData();
- })
-}
-
-
- // Toggle récursif des checkboxes
- $scope.toggleChildren = function(node) {
- console.log("ALX:tc:" + JSON.stringify(node));
- if (node.children && node.children.length) {
- node.children.forEach(function(child) {
- child.checked = node.checked;
- $scope.toggleChildren(child);
- });
- }
- };
-
- $scope.getChildrenFromDB = function(item){
- console.log("ALX:gcfd:" + JSON.stringify(item));
- $scope.callPythonDo({ method: "get_children_from_db", parent: item })
- .then(function (data) {
- console.log("ALX:data1=" + JSON.stringify(data));
- item.children = data.choices;
- item.children.forEach(child => {
- child.checked = item.checked;
- child.expanded = item.expanded;
- });
- });
- }
-}]);
-
app.controller('AfExplorerFormCtrl', [
'$scope',
'$stateParams',
@@ -62,6 +27,8 @@ app.controller('AfExplorerFormCtrl', [
};
$scope.treeData = TreeDataService.getTreeData();
+ $scope.config.attributeList = $scope.config.attributeList || [];
+ $scope.config.selectedAttributes = $scope.config.selectedAttributes || [];
$scope.editorOptions = CodeMirrorSettingService.get("text/plain");
@@ -86,41 +53,40 @@ app.controller('AfExplorerFormCtrl', [
}).error(setErrorInScope.bind($scope.errorScope));
};
- $scope.getServers = function(noken){
- console.log("ALX:get servers");
- console.log("ALX:" + JSON.stringify(noken));
+ $scope.getServers = function(){
$scope.callPythonDo({parameterName: "server_name"}).then(function(data){
- console.log("ALX:getServers return:"+JSON.stringify(data))
- // $scope.config["server_name"] = data.choices;
$scope.server_name = data.choices;
});
};
$scope.getDatabases = function() {
$scope.callPythonDo({parameterName: "database_name"}).then(function(data){
- console.log("ALX:getDatabases return:"+JSON.stringify(data))
$scope.database_name = data.choices;
});
};
$scope.initializeTree = function(){
- console.log("ALX:initializeTree:scope=" + JSON.stringify($scope.config.database_name));
+ console.log("initialization: ");
+ console.log($scope.config.treeData);
+ if (!$scope.config.treeData || $scope.config.treeData.length === 0){
$scope.callPythonDo({method: "get_children_from_db", parent: $scope.config.database_name}).then(function(data){
console.log("ALX:data2=" + JSON.stringify(data));
TreeDataService.setTreeData(data.choices);
- $scope.treeData = TreeDataService.getTreeData();
+ $scope.config.treeData = TreeDataService.getTreeData();
});
+ }
};
$scope.getChildrenFromDB = function(item){
console.log("ALX:gcfd:" + JSON.stringify(item));
- $scope.callPythonDo({ method: "get_children_from_db", parent: item })
+ return $scope.callPythonDo({ method: "get_children_from_db", parent: item })
.then(function (data) {
console.log("ALX:data1=" + JSON.stringify(data));
item.children = data.choices;
item.children.forEach(child => {
- child.checked = item.checked;
child.expanded = false;
});
+ console.log(item);
+ return item;
});
}
@@ -128,14 +94,75 @@ app.controller('AfExplorerFormCtrl', [
// Toggle récursif des checkboxes
$scope.toggleChildren = function(node) {
console.log("ALX:tc:" + JSON.stringify(node));
+ node.expanded = !node.expanded;
+ $scope.getChildrenFromDB(node);
if (node.children && node.children.length) {
node.children.forEach(function(child) {
- child.checked = node.checked;
- $scope.toggleChildren(child);
+ child.expanded = !child.expanded;
+ $scope.getChildrenFromDB(child);
});
}
+
};
+ $scope.doSearch = function(element_name, attribute_name){
+ $scope.callPythonDo({method: "do_search", element_name: element_name, attribute_name: attribute_name, root_tree: $scope.config.treeData}).then(
+ function(data){
+ TreeDataService.setTreeData(data.choices);
+ $scope.config.treeData = TreeDataService.getTreeData();
+ $scope.config.attributeList = data.attributes;
+ $scope.config.selectedAttributes = [];
+ }
+ );
+ };
+
+ $scope.updateAttributeToOutput = function (attribute) {
+ if (attribute.checked && $scope.config.selectedAttributes.includes(attribute)) {
+ $scope.config.selectedAttributes = $scope.config.selectedAttributes.filter(attr => attr.path !== attribute.path);
+ }
+ else {
+ console.log("Adding attribute to output:", attribute);
+
+ if (!$scope.config || !$scope.config.attributeList || $scope.config.selectedAttributes.includes(attribute)) {
+ return;
+ }
+ const attrInConfig = $scope.config.attributeList.find(attr => attr.path === attribute.path);
+
+ if (attrInConfig) {
+ $scope.config.selectedAttributes.push(attribute);
+ attrInConfig.checked = true;
+ } else {
+ console.warn("Attribute not found in config:", attribute.path);
+ }
+ }
+ };
+
+
+$scope.displayAttributes = function(node) {
+
+ if (!node.children || node.children.length === 0) {
+ $scope.getChildrenFromDB(node).then(newNode => {
+ processNode(newNode);
+ });
+ } else {
+ processNode(node);
+ };
+ }
+
+function processNode(node) {
+ $scope.config.attributeList = [];
+ $scope.config.selectedAttributes = [];
+ node.children.forEach(child => {
+ if (child.type === "attribute") {
+ $scope.config.attributeList.push({
+ "name": child.title,
+ "path": child.path
+ });
+ }
+ });
+}
+
+
}]);
app.directive('treeNode', function() {
@@ -145,40 +172,66 @@ app.directive('treeNode', function() {
template: `
-
-
+
▼
▶
-
-
-
`,
link: function(scope) {
- // Récupère la fonction toggleChildren du parent
scope.toggleChildren = scope.$parent.toggleChildren;
scope.getChildrenFromDB = scope.$parent.getChildrenFromDB;
- // Simple toggle du expand (plus de chargement HTTP)
+ scope.doSearch = scope.$parent.doSearch;
+ scope.config = scope.$parent.config;
+ scope.attributeList = scope.config.attributeList || [];
+ scope.displayAttributes = scope.$parent.displayAttributes;
scope.toggleExpand = function(node) {
- console.log("ALX:expand !" + JSON.stringify(node));
node.expanded = !node.expanded;
- scope.getChildrenFromDB(node);
+
+ if (node.expanded && (!node.children || !node.children.length)) {
+ scope.getChildrenFromDB(node);
+ }
+ };
+
+ scope.hasAttributes = function(node) {
+ if (!Array.isArray(scope.$parent.config.attributeList) || scope.$parent.config.attributeList.length === 0) {
+ return false;
+ }
+
+ return scope.$parent.config.attributeList.some(child => {
+ const expected = node.title + "|" + child.title;
+ return child.path.endsWith(expected);
+ });
};
+
+ scope.isElement = function(child) {
+ return child.type === 'element';
+ }
}
};
});
diff --git a/python-lib/osisoft_client.py b/python-lib/osisoft_client.py
index 955e411..71c39c1 100644
--- a/python-lib/osisoft_client.py
+++ b/python-lib/osisoft_client.py
@@ -13,6 +13,7 @@
iso_to_epoch, RecordsLimit, is_iso8601, get_next_page_url, change_key_in_dict,
BatchTimeCounter
)
+from osisoft_plugin_common import get_item_details
from osisoft_pagination import OffsetPagination
from safe_logger import SafeLogger
@@ -732,6 +733,8 @@ def search_attributes(self, database_webid, **kwargs):
"query": query,
"databaseWebId": database_webid
}
+ if "search_associations" in kwargs:
+ params["associations"] = kwargs.get("search_associations")
json_response = self.get(url=search_attributes_base_url, headers=headers, params=params)
if OSIsoftConstants.DKU_ERROR_KEY in json_response:
yield json_response
@@ -810,9 +813,69 @@ def traverse(self, path_elements):
json_response = self.get(url=next_url, headers=headers, params={}, error_source="traverse")
if attribute:
item = self.extract_item_with_name(json_response, attribute)
+ return item
+
+ def traverse_and_cache(self, path_elements, path_attributes, tree):
+ full_path_elements = path_elements.copy() + path_attributes.copy()
+ if tree.exists(full_path_elements):
+ # this path has already been retrieved, so skip
+ return
+ # Loading piwebapi initial page
+ next_url = self.endpoint.get_base_url()
+ headers = self.get_requests_headers()
+ json_response = self.get(url=next_url, headers=headers, params={}, error_source="traverse_and_cache")
+
+ # Asset server page
+ next_url = self.extract_link_with_key(json_response, "AssetServers")
+ json_response = self.get(url=next_url, headers=headers, params={}, error_source="traverse_and_cache")
+
+ item = self.extract_item_with_name(json_response, path_elements.pop(0))
+ tree.put(full_path_elements[0:1], get_item_details(item))
+ next_url = self.extract_link_with_key(item, "Databases")
+ json_response = self.get(url=next_url, headers=headers, params={}, error_source="traverse_and_cache")
+ # retrieved_from_cache = tree.get(full_path_elements[0:2], {}).get("url")+"/elements"
+ # get the database
+ item = self.extract_item_with_name(json_response, path_elements.pop(0))
+ tree.put(full_path_elements[0:2], get_item_details(item))
+ next_url = self.extract_link_with_key(item, "Elements")
+ json_response = self.get(url=next_url, headers=headers, params={}, error_source="traverse_and_cache")
+
+ # Looping through elements
+ counter = 3
+ before_last_url = None
+ for path_element in path_elements:
+        element, attribute = self.split_element_attribute(path_element)  # attribute is None when the token has no '|' part
+ item = self.extract_item_with_name(json_response, element)
+ tree.put(full_path_elements[0:counter], get_item_details(item))
+ counter += 1
+ before_last_url = self.extract_link_with_key(item, "Attributes")
+ next_url = self.extract_link_with_key(item, "Elements")
+ json_response = self.get(url=next_url, headers=headers, params={}, error_source="traverse_and_cache")
+ json_response = self.get(url=before_last_url, headers=headers, params={}, error_source="traverse_and_cache")
+ before_last_json = None
+ for path_attribute in path_attributes:
+ item = self.extract_item_with_name(json_response, path_attribute)
+ item_details = get_item_details(item)
+ item_details["checked"] = True # That should not be done here
+ tree.put(full_path_elements[0:counter], item_details)
+ counter += 1
+ next_url = self.extract_link_with_key(item, "Attributes")
+ if next_url:
+ before_last_json = json_response.copy()
+ json_response = self.get(url=next_url, headers=headers, params={}, error_source="traverse_and_cache")
+ else:
+ break
+        items = (before_last_json or {}).get(OSIsoftConstants.API_ITEM_KEY, [])  # guard: None when the attribute loop never ran or never broke
+ for item in items:
+ item_details = get_item_details(item)
+ item_details["checked"] = False # That should not be done here
+ tree.put(full_path_elements[0:counter-2] + [item_details.get("title")], item_details)
return item
+ def cache_all_attributes(self, elements_paths_tokens, tree):
+ pass
+
def split_element_attribute(self, path_element):
attribute = None
path_elements = path_element.split("|")
diff --git a/python-lib/osisoft_plugin_common.py b/python-lib/osisoft_plugin_common.py
index 6f8b8ed..82cc580 100644
--- a/python-lib/osisoft_plugin_common.py
+++ b/python-lib/osisoft_plugin_common.py
@@ -634,3 +634,121 @@ def is_batch_full(self):
def add(self, start_time, end_time, interval):
self.total_batched_time += compute_time_spent(start_time, end_time, interval)
+
+
+def get_item_details(item):
+ KEYS_TO_CHECK = {
+ "Name": "title", "TemplateName": "template_name", "CategoryNames": "category_names",
+ "HasChildren": "has_children", "Path": "path", "WebId": "id", "checked": "checked"
+ } # should we stick to python naming convention or keep pi's ones throughout ?
+ details = {}
+ for key_to_check in KEYS_TO_CHECK:
+ value = item.get(key_to_check)
+ if value:
+ details[KEYS_TO_CHECK.get(key_to_check)] = value
+ details["url"] = item.get("Links", {}).get("Self")
+ details["type"] = "attribute" if "|" in details.get("path", "") else "element"
+ return details
+
+
+class Tree():
+ # Each put
+ # - stores the data in the index
+ # - builds a tree based on the data's path, pointing at the right index
+ def __init__(self, root_tree=None):
+ self.tree = {}
+ self.index = []
+ if root_tree:
+ self._ingest(root_tree)
+
+ def _ingest(self, root_tree, parent_path=None):
+ parent_path = parent_path or []
+ if isinstance(root_tree, list):
+ for item in root_tree:
+ if not parent_path:
+ path = item.get("path", "")
+ parent_path = path.split("\\")[2:][0:2]
+ item_children = item.pop("children", [])
+ title = item.get("title")
+ self._ingest(item_children, parent_path=parent_path + [title])
+ path = item.get("path", "")
+ self.put(parent_path + [title], item)
+
+ def put(self, path, data):
+ if isinstance(path, list):
+ current_level = self.tree
+ for token in path:
+ if token not in current_level:
+ current_level[token] = {}
+ current_level = current_level.get(token)
+ index_to_update = current_level.get("_v", None)
+ if index_to_update is not None:
+ self.index[index_to_update] = data
+ else:
+ last_index = len(self.index)
+ self.index.append(data)
+ current_level.update({"_v": last_index})
+
+ def get(self, path, default=None):
+ if isinstance(path, list):
+ current_level = self.tree
+ for token in path:
+ if token not in current_level:
+ return default
+ else:
+ current_level = current_level.get(token)
+ index = current_level.get("_v")
+ return self.get_record(index)
+
+ def get_tree(self):
+ return self.tree
+
+ def get_record(self, index):
+        if index is not None and index < len(self.index):
+ return self.index[index]
+ return None
+
+ def get_records(self):
+ return self.index
+
+ def exists(self, path):
+ current = self.tree
+ if isinstance(path, list):
+ for token in path:
+ current = current.get(token, {})
+ if not current:
+ return False
+ return True
+ return False
+
+ def print(self):
+ print("Tree {}".format(self.tree))
+ print("Tree content {}".format(self.index))
+
+
+def recursive_tree_rebuild(dictionary, records, counter=None):
+ counter = counter or -1
+ output = []
+
+ for key in dictionary:
+ if key == "_v":
+ continue
+ sub_dictionary = dictionary.get(key)
+ context = {}
+ if "_v" in sub_dictionary:
+ index_id = sub_dictionary.get("_v")
+ if isinstance(index_id, int):
+ context = records[index_id]
+ counter += 1
+ if sub_dictionary:
+ counter += 1
+ children = recursive_tree_rebuild(sub_dictionary, records, counter + 1)
+ else:
+ children = []
+ # context["id"] = str(counter)
+ context["title"] = key
+ context["expanded"] = True
+ # context["checked"] = False
+ context["children"] = children
+ output.append(context)
+ return output
diff --git a/resource/browse_af_tree.py b/resource/browse_af_tree.py
index 929a9b7..91b3f6d 100644
--- a/resource/browse_af_tree.py
+++ b/resource/browse_af_tree.py
@@ -1,11 +1,12 @@
from osisoft_client import OSIsoftClient
from osisoft_plugin_common import get_credentials, build_select_choices, check_debug_mode
+from osisoft_plugin_common import get_item_details, Tree, recursive_tree_rebuild
import dataiku
def do(payload, config, plugin_config, inputs):
input_tree = None
- if len(inputs)>0:
+ if len(inputs) > 0:
input_item = inputs[0]
input_type = input_item.get("type")
if input_type == "DATASET":
@@ -14,7 +15,6 @@ def do(payload, config, plugin_config, inputs):
input_tree = input_dataset.get_dataframe(infer_with_pandas=False)
config["is_ssl_check_disabled"] = True
- print("ALX:af explorer do, payload={}, config={}, plugin_config={}, inputs={}".format(payload, config, plugin_config, inputs))
if "config" in config:
config = config.get("config")
if "credentials" not in config:
@@ -41,7 +41,6 @@ def do(payload, config, plugin_config, inputs):
is_debug_mode = check_debug_mode(config)
is_ssl_check_disabled = True
- print("ALX:is_ssl_check_disabled={}".format(is_ssl_check_disabled))
client = OSIsoftClient(server_url, auth_type, username, password, is_ssl_check_disabled=is_ssl_check_disabled, is_debug_mode=is_debug_mode)
@@ -56,24 +55,47 @@ def do(payload, config, plugin_config, inputs):
database_name = config.get("database_name")
element_name = config.get("element_name")
attribute_name = config.get("attribute_name")
+ # root_tree = payload.get("root_tree")
+ root_tree = config.get("treeData", [])
+ root_tree = shorten_tree(root_tree)
attributes = []
# https://dku-qa-osi.francecentral.cloudapp.azure.com/piwebapi/assetdatabases/F1RD3VEt1yTvt0ip6-a5yeEVsgbMcrwu_Je0qg9btcZIvPswT1NJU09GVC1QSS1TRVJWXFdFTEw
database_webid = database_name.split("/")[-1]
+ # element_query_keys = {
+ # "element_name": "Name:'{}'",
+ # "search_root_path": "Root:'{}'",
+ # "element_template": "Template:'{}'",
+ # "element_type": "Type:'{}'",
+ # "element_category": "CategoryName:'{}'"
+ # }
+ # attribute_query_keys = {
+ # "attribute_name": "Name:'{}'",
+ # "attribute_category": "CategoryName:'{}'",
+ # "attribute_value_type": "Type:'{}'"
+ # }
for attribute in client.search_attributes(
- database_webid, attribute_name=attribute_name, element_name=element_name):
- print("ALX:attribute={}".format(attribute))
+ database_webid,
+ attribute_name=attribute_name,
+ element_name=element_name,
+ search_associations="Paths"
+ ):
+ attribute["checked"] = False
attributes.append(attribute)
- return {"choices": attributes}
+ attributes = duplicate_linked_attributes(attributes)
+ items = []
+ for attribute in attributes:
+ item = get_item_details(attribute)
+ items.append(item)
+ attributesCopy = items.copy()
+ rebuilt_tree = rebuild_tree(client, items, root_tree)
+ return {"choices": rebuilt_tree, "attributes": attributesCopy}
parameter_name = payload.get("parameterName")
if parameter_name == "server_name":
choices = []
- print("ALX:do function")
servers = client.get_asset_servers(can_raise=False)
- print("ALX:servers={}".format(servers))
choices.extend(servers)
- print("ALX:server choices={}".format(choices))
return build_select_choices(choices)
if parameter_name == "data_server_url":
@@ -89,26 +111,23 @@ def do(payload, config, plugin_config, inputs):
return build_select_choices(choices)
else:
return build_select_choices()
+ if parameter_name == "treeData":
+ return {"choices": config.get("treeData")}
return build_select_choices()
def get_query_catalogs(cnx, config):
- print("ALX:def get_query_catalogs")
- print("ALX:cnx={}, config={}".format(cnx, config))
user = config.get("credentials", {}).get("osisoft_basic", {}).get("user")
password = config.get("credentials", {}).get("osisoft_basic", {}).get("password")
return {"choices": [user, password]}
def get_children_from_db(client, parent_node, database_name=None):
- print("ALX:parent_node={}".format(parent_node))
- # ALX:parent_node={'show_advanced_parameters': False, 'use_server_url_column': False, 'is_ssl_check_disabled': True, 'must_convert_object_to_string': False, 'is_debug_mode': False, 'credentials': {'auth_type': 'basic', 'can_disable_ssl_check': True, 'ssl_cert_path': '', 'default_server': 'dku-qa-osi.francecentral.cloudapp.azure.com', 'can_override_server_url': True, 'get_parameters': {}, 'post_parameters': {}, 'url_swap': [], 'max_request_size': 1000, 'estimated_density': 6, 'maximum_points_returned': 600, 'osisoft_basic': {'user': 'abourret', 'password': 'S58BirZjtsUDTJ3'}}}
if isinstance(parent_node, dict):
url = parent_node.get("url", database_name)
else:
url = parent_node
- print("ALX:url to search:{}".format(url))
this_node = next(client.get_next_item_from_url(url))
links = this_node.get("Links", {})
attributes_url = links.get("Attributes")
@@ -118,6 +137,7 @@ def get_children_from_db(client, parent_node, database_name=None):
elements = client.get_next_item_from_url(elements_url)
for element in elements:
child = get_item_details(element)
+ # child["title"] = "🧩{}".format(child.get("title"))
child["type"] = "element"
child["children"] = []
children.append(child)
@@ -125,6 +145,7 @@ def get_children_from_db(client, parent_node, database_name=None):
attributes = client.get_next_item_from_url(attributes_url)
for attribute in attributes:
child = get_item_details(attribute)
+ # child["title"] = "🏷️{}".format(child.get("title"))
child["type"] = "attribute"
if child.get("has_children"):
child["children"] = []
@@ -132,13 +153,89 @@ def get_children_from_db(client, parent_node, database_name=None):
return {"choices": children}
+# method2:
+# we dig, but this time it's index[token name], and we store as we go in the child, with the real data indexed in a list and just the rank pointing to it
+# to build the final tree, we browse the index, get the index data, rebuild the struct from there
+# Tree class ? put(path, data), get(path, data)
+
+
+def rebuild_tree(client, items, root_tree=None):
+ # builds an active tree containing all the items and their parent up to the root
+ tree = Tree(root_tree=root_tree)
+ tree.print()
+    while items:
+ item = items.pop()
+ if item is None:
+ break
+ find_all_ancestors(client, item, tree)
+ update_item(item, tree)
+ result = recursive_tree_rebuild(tree.get_tree(), tree.get_records())
+ result = drop_first_levels(result)
+ return result
+
+
+def drop_first_levels(result):
+ # recursively removes the 2 first levels of the returned tree
+ # (server and DB)
+ output_result = []
+ for item in result:
+ path = item.get("path", "")
+ path_length = len(path.split("\\"))
+ if path_length >= 5:
+ output_result.append(item)
+ else:
+ children = item.get("children", [])
+ output_result = drop_first_levels(children)
+ return output_result
+
+
+def find_all_ancestors(client, item, tree):
+ # Find all the ancestors of an item
+ elements_paths_tokens, attributes_paths_tokens = path_to_list(item.get("path"))
+ client.traverse_and_cache(elements_paths_tokens, attributes_paths_tokens, tree)
+
+
+def combine_trees(final_tree, all_item_s_ancestors):
+ # combine two trees with partial overlap and common root ancestor
+ return final_tree
+
+
+# elements, attributes
+def path_to_list(path):
+ if not path:
+        return [], []
+ return path.split('|')[0].split('\\')[2:], (path.split('|')[1:])
+
+
+def shorten_tree(tree):
+ if isinstance(tree, list):
+ for node in tree:
+ if "expanded" in node:
+ # node.pop("expanded", None)
+ node["expanded"] = False
+ if "children" in node:
+ shorten_tree(node.get("children", []))
+ return tree
+
+
+def duplicate_linked_attributes(attributes):
+ duplicated_attributes = []
+ for attribute in attributes:
+ paths = attribute.pop("Paths", [attribute.get("Path")])
+ for path in paths:
+ this_attribute = attribute.copy()
+ this_attribute["Path"] = path
+ this_attribute["type"] = "attribute" if "|" in path else "element"
+ duplicated_attributes.append(this_attribute)
+ return duplicated_attributes
+
+
+def set_as_selected(items):
+ for item in items:
+ item["checked"] = True
+ return items
+
-def get_item_details(item):
- KEYS_TO_CHECK = {"Name": "title", "TemplateName": "template_name", "CategoryNames": "category_names", "HasChildren": "has_children", "Path": "path", "WebId": "id"}
- details = {}
- for key_to_check in KEYS_TO_CHECK:
- value = item.get(key_to_check)
- if value:
- details[KEYS_TO_CHECK.get(key_to_check)] = value
- details["url"] = item.get("Links", {}).get("Self")
- return details
+def update_item(item, tree):
+ elements_paths_tokens, attributes_paths_tokens = path_to_list(item.get("path"))
+ tree.put(elements_paths_tokens + attributes_paths_tokens, item)
diff --git a/resource/pi-system_af-explorer.css b/resource/pi-system_af-explorer.css
new file mode 100644
index 0000000..575492b
--- /dev/null
+++ b/resource/pi-system_af-explorer.css
@@ -0,0 +1,16 @@
+.fh.w800.oa {
+ width: 100% !important;
+}
+
+.tree-node__label--clickable{
+ background-color: yellow;
+}
+.pi-system-explorer__main {
+ display: grid;
+ grid-template-columns: 500px auto;
+ column-gap: 50px;
+ margin-top: 20px;
+}
+.pi-system-explorer__tree-view, .pi-system-explorer__center-view {
+ border: 1px solid #ccc; padding: 10px; border-radius: 5px;
+}
diff --git a/resource/pi-system_af-explorer.html b/resource/pi-system_af-explorer.html
index cf36fa1..d331c88 100644
--- a/resource/pi-system_af-explorer.html
+++ b/resource/pi-system_af-explorer.html
@@ -1,3 +1,4 @@
+
@@ -38,12 +39,55 @@
-
-
+
+
+
+
Elements
+
+
+
+
+
+
+
+
+
+
Attributes
+
+
+
+
+
+
+
+
+