IVS-307 performance #348

Open · wants to merge 4 commits into base branch development
6 changes: 4 additions & 2 deletions features/CTX000_Presentation-colours-and-textures.feature
@@ -1,6 +1,6 @@
@implementer-agreement
@CTX
@version1
@version2
@E00020

Feature: CTX000 - Presentation colours and textures
@@ -22,8 +22,10 @@ Feature: CTX000 - Presentation colours and textures

Scenario: Check for activation - Styled Materials

Given an IfcRoot
Given an IfcObjectDefinition
Given its attribute HasAssociations
Given its entity type is 'IfcRelAssociatesMaterial'
Given its attribute RelatingMaterial
Given all referenced instances
Given its entity type is 'IfcMaterial'
Given its attribute HasRepresentation
2 changes: 1 addition & 1 deletion features/SPS007_Spatial-containment.feature
@@ -34,7 +34,7 @@ The rule verifies that spatial containment via IfcRelContainedInSpatialStructure


Scenario: All other IFC entities must not be contained within a spatial structure
Given An IfcRoot
Given An IfcProduct
Given Its Type is not 'IfcElement' including subtypes
Given Its Type is not 'IfcGrid'
Given Its Type is not 'IfcAnnotation'
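The two feature-file changes above narrow the activation entity (IfcRoot to IfcObjectDefinition for CTX000, IfcRoot to IfcProduct for SPS007), which matches the performance goal in the PR title: fewer instances are fed into the Given chain. A minimal sketch of how the difference can be measured with ifcopenshell (the file path is a placeholder):

import ifcopenshell

# IfcRoot covers every rooted entity, including relationships and property
# sets; IfcObjectDefinition and IfcProduct are much smaller subsets, so the
# rules iterate over fewer candidate instances.
model = ifcopenshell.open("model.ifc")  # placeholder path
for entity_type in ("IfcRoot", "IfcObjectDefinition", "IfcProduct"):
    print(entity_type, len(model.by_type(entity_type)))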
21 changes: 4 additions & 17 deletions features/steps/thens/alignment.py
Expand Up @@ -9,7 +9,7 @@
from utils import ifc43x_alignment_validation as ifc43
from utils.geometry import AlignmentSegmentContinuityCalculation
from utils import ifc
from validation_handling import gherkin_ifc
from validation_handling import full_stack_rule, gherkin_ifc
from . import ValidationOutcome, OutcomeSeverity

from parse_type import TypeBuilder
@@ -163,20 +163,6 @@ def pretty_print_expected_geometry_types(exp: List[Dict]) -> Union[str, None]:
return ", ".join(pretty)


def ala003_activation_inst(inst, context) -> Union[ifcopenshell.entity_instance | None]:
"""
Used in ALA003 as reverse traversal of graph to locate the correct business logic entity
"""
for candidate in context._stack[2]["instances"]:
if candidate is None:
return None
else:
for rep in candidate.Representations:
for item in rep.Items:
if item.id() == inst.id():
return candidate.ShapeOfProduct[0]


@gherkin_ifc.step(
'A representation by {ifc_rep_criteria} requires the {existence:absence_or_presence} of {entities} in the business logic')
def step_impl(context, inst, ifc_rep_criteria, existence, entities):
@@ -278,10 +264,11 @@ def step_impl(context, inst):


@gherkin_ifc.step('Each segment must have the same geometry type as its corresponding {activation_phrase}')
def step_impl(context, inst, activation_phrase):
@full_stack_rule
def step_impl(context, inst, path, activation_phrase):
if inst is not None:
# retrieve activation instance entity from the attribute stack
activation_ent = ala003_activation_inst(inst, context)
activation_ent = path[0]
if activation_ent is not None:

if activation_ent.is_a().upper() == "IFCALIGNMENT":
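The ALA003 change above drops the manual reverse traversal and instead reads the activation entity from the path argument injected by the full_stack_rule decorator. A hypothetical, self-contained illustration of the ordering this assumes (outermost Given first, so index 0 is the activation instance; strings stand in for ifcopenshell entity instances):

# Assumed shape of the value path handed to a @full_stack_rule step:
# the instance matched by the first Given comes first, the representation
# item under test last.
path = ["IfcAlignment #1", "IfcProductDefinitionShape #7", "IfcCompositeCurve #12"]
activation_ent = path[0]
print(activation_ent)  # "IfcAlignment #1"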
116 changes: 65 additions & 51 deletions features/steps/thens/relations.py
@@ -191,9 +191,10 @@ def upper_case_if_string(v):
return v.upper()
except:
return v

def get_pset_definitions(context, table):
schema_specific_path = system.get_abs_path(f"resources/{context.model.schema.upper()}/{table}.csv")

@functools.cache
def get_pset_definitions(schema, table):
schema_specific_path = system.get_abs_path(f"resources/{schema.upper()}/{table}.csv")

if os.path.exists(schema_specific_path):
tbl_path = schema_specific_path
@@ -203,6 +204,64 @@ def get_pset_definitions(context, table):
tbl = system.get_csv(tbl_path, return_type='dict')
return {d['property_set_name']: d for d in tbl}

class always_equal_dict(dict):
"""
Accepted as an argument by a functools.cache'd function without ever
differentiating cache entries; make sure the other arguments do that instead.
>>> {always_equal_dict({}): 1} | {always_equal_dict({'a':1}):2}
{{}: 2}
"""
def __hash__(self):
return 0
def __eq__(self, _):
return True

@functools.cache
def establish_accepted_pset_values(name : str, _schema : str, _table : str, property_set_definitions : always_equal_dict):
# _schema and _table are only for cache key, property_set_definitions is derived from _schema, _table,
# but unhashable because it's a dict
def make_obj(s):
if s:
return json.loads(s.replace("'", '"'))
else:
return ''

try:
property_set_attr = property_set_definitions[name]
except KeyError: # Pset_ not found in template
property_set_attr = ''
return property_set_attr

accepted_values = {}
accepted_values['template_type'] = property_set_attr.get('template_type', '')

accepted_values['property_names'] = []
accepted_values['property_types'] = []
accepted_values['data_types'] = []

for property_def in make_obj(property_set_attr['property_definitions']):
accepted_values['property_names'].append(property_def['property_name'])
accepted_values['property_types'].append(property_def['property_type'])
accepted_values['data_types'].append(property_def['data_type'])

accepted_values['applicable_entities'] = [s.split('/')[0] for s in make_obj(property_set_attr['applicable_entities'])]

# in the ifc2x3 data, predefined type restrictions are imposed as:
# | | | applicable_type_value | |
# | | | {entity}.PredefinedType={predefinedtype} | |
if property_set_attr['applicable_type_value'] and '.PredefinedType=' in property_set_attr['applicable_type_value']:
ptype = property_set_attr['applicable_type_value'].split('.PredefinedType=')[1].upper()
accepted_values['applicable_entities_with_predefined_types'] = list(zip(
accepted_values['applicable_entities'],
(ptype for _ in itertools.count())
))
else:
# in the ifc4 data, predefined type restrictions are imposed as:
# | | | applicable_entities | |
# | | | ['{entity}','{entity}/{predefinedtype}','{entity2}/{predefinedtype2}'] | |
accepted_values['applicable_entities_with_predefined_types'] = [((ab[0], ab[1].upper()) if len(ab) == 2 else (ab[0], None)) for ab in (s.split('/') for s in make_obj(property_set_attr['applicable_entities']))]

return accepted_values

@gherkin_ifc.step('The IfcPropertySet Name attribute value must use predefined values according to the "{table}" table')
@gherkin_ifc.step('The IfcPropertySet must be assigned according to the property set definitions table "{table}"')
@@ -211,60 +270,15 @@ def get_pset_definitions(context, table):
@gherkin_ifc.step('Each associated IfcProperty value must be of data type according to the property set definitions table "{table}"')
def step_impl(context, inst, table):

property_set_definitions = get_pset_definitions(context, table)

def establish_accepted_pset_values(name, property_set_definitions):
def make_obj(s):
if s:
return json.loads(s.replace("'", '"'))
else:
return ''

try:
property_set_attr = property_set_definitions[name]
except KeyError: # Pset_ not found in template
property_set_attr = ''
return property_set_attr

accepted_values = {}
accepted_values['template_type'] = property_set_attr.get('template_type', '')

accepted_values['property_names'] = []
accepted_values['property_types'] = []
accepted_values['data_types'] = []

for property_def in make_obj(property_set_attr['property_definitions']):
accepted_values['property_names'].append(property_def['property_name'])
accepted_values['property_types'].append(property_def['property_type'])
accepted_values['data_types'].append(property_def['data_type'])

accepted_values['applicable_entities'] = [s.split('/')[0] for s in make_obj(property_set_attr['applicable_entities'])]

# in the ifc2x3 data, predefined type restrictions are imposed as:
# | | | applicable_type_value | |
# | | | {entity}.PredefinedType={predefinedtype} | |
if property_set_attr['applicable_type_value'] and '.PredefinedType=' in property_set_attr['applicable_type_value']:
ptype = property_set_attr['applicable_type_value'].split('.PredefinedType=')[1].upper()
accepted_values['applicable_entities_with_predefined_types'] = list(zip(
accepted_values['applicable_entities'],
(ptype for _ in itertools.count())
))
else:
# in the ifc4 data, predefined type restrictions are imposed as:
# | | | applicable_entities | |
# | | | ['{entity}','{entity}/{predefinedtype}','{entity2}/{predefinedtype2}'] | |
accepted_values['applicable_entities_with_predefined_types'] = [((ab[0], ab[1].upper()) if len(ab) == 2 else (ab[0], None)) for ab in (s.split('/') for s in make_obj(property_set_attr['applicable_entities']))]

return accepted_values

property_set_definitions = get_pset_definitions(context.model.schema, table)
name = getattr(inst, 'Name', 'Attribute not found')


if 'IfcPropertySet Name attribute value must use predefined values according' in context.step.name:
if name not in property_set_definitions.keys():
yield ValidationOutcome(inst=inst, observed = {'value':name}, severity=OutcomeSeverity.ERROR)
return

accepted_values = establish_accepted_pset_values(name, property_set_definitions)
accepted_values = establish_accepted_pset_values(name, context.model.schema, table, always_equal_dict(property_set_definitions))

if accepted_values: # If not it's a custom Pset_ prefixed attribute, e.g. Pset_Mywall (no need for further Pset_ checks),

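The caching refactor above hinges on two details: functools.cache keys on the arguments it is called with, and always_equal_dict deliberately hashes and compares as equal to everything, so the unhashable pset-definition dict never differentiates cache entries; name, _schema and _table have to do that instead. A standalone sketch of that behaviour (the lookup function and the data are illustrative, not part of the PR):

import functools

class always_equal_dict(dict):
    # Hashes to a constant and compares equal to anything, so it never
    # contributes to the cache key.
    def __hash__(self):
        return 0
    def __eq__(self, _):
        return True

@functools.cache
def lookup(name, schema, table, definitions):
    print(f"cache miss: {name} / {schema} / {table}")
    return definitions.get(name)

defs = always_equal_dict({"Pset_WallCommon": {"template_type": "PSET_OCCURRENCEDRIVEN"}})
lookup("Pset_WallCommon", "IFC4", "pset_definitions", defs)    # computed
lookup("Pset_WallCommon", "IFC4", "pset_definitions", defs)    # cache hit, no print
lookup("Pset_WallCommon", "IFC2X3", "pset_definitions", defs)  # different schema, recomputed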
14 changes: 6 additions & 8 deletions features/steps/validation_handling.py
@@ -208,15 +208,13 @@ def apply_then_operation(fn, inst, context, current_path, depth=0, **kwargs):
if inst is None:
return
if context.is_full_stack_rule:
x = misc.get_stack_tree(context)[::-1]
value_path = []
idxs = [current_path[0:i+1] for i in range(len(current_path))]
for idx, layer in zip(idxs, x):
v = layer
while idx:
i, *idx = idx
v = v[i]
value_path.append(v)
for val in misc.get_stack_tree(context)[::-1]:
i = 0
while not should_apply(val, 0):
val = val[current_path[i]]
i += 1
value_path.append(val)
kwargs = kwargs | {'path': value_path}
top_level_index = current_path[0] if current_path else None
activation_inst = inst if not current_path or activation_instances[top_level_index] is None else activation_instances[top_level_index]
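The rewritten loop walks each layer of the reversed stack tree down along current_path until should_apply reports that the value for this instance has been reached, instead of pre-computing every path prefix. A hypothetical, self-contained sketch of the drilling idea; is_leaf stands in for the real should_apply(val, 0) check, whose exact semantics are not shown in this diff:

# Drill into one nested stack layer along a path of indices until a single
# value (rather than a nested list of per-instance values) is reached.
def is_leaf(val):
    return not isinstance(val, list)

def drill(layer, current_path):
    val, i = layer, 0
    while not is_leaf(val):
        val = val[current_path[i]]
        i += 1
    return val

layer = [["a0", "a1"], ["b0", "b1"]]  # values nested per activation instance
print(drill(layer, [1, 0]))           # "b0"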