Commit

refactor: move run exports adjustment to global op when making graph (#3265)

* refactor: move run exports adjustment to global op

* Update make_graph.py

* refactor: stash it all in the graph
beckermr authored Dec 3, 2024
1 parent 1c67df6 commit ad495cb
Showing 1 changed file with 18 additions and 34 deletions.
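
The change hinges on networkx's graph-level attribute dict: shared lookup tables are computed once and stored on gx.graph, so later passes read them instead of rebuilding them per call. Below is a minimal, self-contained sketch of that pattern, not code from this repository; the toy node names and payloads are invented for illustration.

# Minimal sketch (not cf-scripts code) of the pattern adopted in this commit:
# compute shared metadata once and stash it in the graph-level attribute dict
# (gx.graph), so downstream passes consume it instead of recomputing.
import networkx as nx

gx = nx.DiGraph()
# toy payloads for illustration only
gx.add_node("pkg-a", payload={"strong_exports": True})
gx.add_node("pkg-b", payload={"strong_exports": False})

# "global op": run once, store on the graph itself
gx.graph["strong_exports"] = {
    name
    for name, data in gx.nodes.items()
    if data.get("payload", {}).get("strong_exports", False)
}

# a later per-node pass reads the stashed metadata
def has_strong_exports(name: str, gx: nx.DiGraph) -> bool:
    return name in gx.graph["strong_exports"]

print(has_strong_exports("pkg-a", gx))  # True
print(has_strong_exports("pkg-b", gx))  # False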
52 changes: 18 additions & 34 deletions conda_forge_tick/make_graph.py
@@ -230,19 +230,6 @@ def _add_run_exports_per_node(attrs, outputs_lut, strong_exports):
 def _create_edges(gx: nx.DiGraph) -> nx.DiGraph:
     logger.info("inferring nodes and edges")
 
-    # make the outputs look up table so we can link properly
-    # and add this as an attr so we can use later
-    gx.graph["outputs_lut"] = make_outputs_lut_from_graph(gx)
-
-    # collect all of the strong run exports
-    # we add the compiler stubs so that we know when host and run
-    # envs will have compiler-related packages in them
-    strong_exports = {
-        node_name
-        for node_name, node in gx.nodes.items()
-        if node.get("payload").get("strong_exports", False)
-    } | set(COMPILER_STUBS_WITH_STRONG_EXPORTS)
-
     # This drops all the edge data and only keeps the node data
     gx = nx.create_empty_copy(gx)
 
@@ -252,7 +239,7 @@ def _create_edges(gx: nx.DiGraph) -> nx.DiGraph:
     for node in all_nodes:
         with gx.nodes[node]["payload"] as attrs:
             deps = _add_run_exports_per_node(
-                attrs, gx.graph["outputs_lut"], strong_exports
+                attrs, gx.graph["outputs_lut"], gx.graph["strong_exports"]
             )
 
         for dep in deps:
@@ -269,33 +256,31 @@ def _create_edges(gx: nx.DiGraph) -> nx.DiGraph:
     return gx
 
 
-def _add_run_exports(nodes_to_update):
-    gx = load_graph()
-
-    new_names = [name for name in nodes_to_update if name not in gx.nodes]
-    for name in nodes_to_update:
-        sub_graph = {
-            "payload": LazyJson(f"node_attrs/{name}.json"),
-        }
-        if name in new_names:
-            gx.add_node(name, **sub_graph)
-        else:
-            gx.nodes[name].update(**sub_graph)
+def _add_graph_metadata(gx: nx.DiGraph):
+    logger.info("adding graph metadata")
 
-    outputs_lut = make_outputs_lut_from_graph(gx)
+    # make the outputs look up table so we can link properly
+    # and add this as an attr so we can use later
+    gx.graph["outputs_lut"] = make_outputs_lut_from_graph(gx)
 
     # collect all of the strong run exports
     # we add the compiler stubs so that we know when host and run
     # envs will have compiler-related packages in them
-    strong_exports = {
+    gx.graph["strong_exports"] = {
         node_name
         for node_name, node in gx.nodes.items()
         if node.get("payload").get("strong_exports", False)
     } | set(COMPILER_STUBS_WITH_STRONG_EXPORTS)
 
+
+def _add_run_exports(gx: nx.DiGraph, nodes_to_update: set[str]):
+    logger.info("adding run exports")
+
     for node in nodes_to_update:
         with gx.nodes[node]["payload"] as attrs:
-            _add_run_exports_per_node(attrs, outputs_lut, strong_exports)
+            _add_run_exports_per_node(
+                attrs, gx.graph["outputs_lut"], gx.graph["strong_exports"]
+            )
 
 
 def _update_graph_nodes(
@@ -310,11 +295,6 @@ def _update_graph_nodes(
         mark_not_archived=mark_not_archived,
     )
     logger.info("feedstock fetch loop completed")
-
-    logger.info("adding run exports")
-    _add_run_exports(names)
-    logger.info("done adding run exports")
-
     logger.info(f"memory usage: {psutil.virtual_memory()}")
 
 
@@ -366,6 +346,10 @@ def main(
         else:
             gx.nodes[name].update(**sub_graph)
 
+    _add_graph_metadata(gx)
+
+    _add_run_exports(gx, names)
+
     gx = _create_edges(gx)
 
     _migrate_schemas(tot_names)
