extensions/commands/art/cmd_build_info.py: 105 additions & 51 deletions
@@ -110,7 +110,7 @@ def _get_requested_by(nodes, node_id, artifact_type):
class _BuildInfo:

def __init__(self, graph, name, number, repository, with_dependencies=False,
url=None, user=None, password=None):
add_cached_deps=False, url=None, user=None, password=None):
self._graph = graph
self._name = name
self._number = number
@@ -120,6 +120,7 @@ def __init__(self, graph, name, number, repository, with_dependencies=False,
self._password = password
self._cached_artifact_info = {}
self._with_dependencies = with_dependencies
self._add_cached_deps = add_cached_deps

def get_artifacts(self, node, artifact_type, is_dependency=False):
"""
@@ -145,7 +146,7 @@ def _get_local_artifacts():
file_list = list(dl_folder.glob("*"))
if len(file_list) >= 3:
for file_path in dl_folder.glob("*"):
if file_path.is_file():
if file_path.is_file(): # FIXME: Make it recursive for metadata folder
file_name = file_path.name
md5, sha1, sha256 = _get_hashes(file_path)
artifact_info = {"type": os.path.splitext(file_name)[1].lstrip('.'),
@@ -226,59 +227,107 @@ def _get_remote_artifacts():

return artifacts

def get_pyreq_artifacts(self, pyreq, pyref, is_dependency=False):
artifacts_folder = pyreq.get("path")
# this may happen for conan versions < 2.0.14, do not crash in that case
if artifacts_folder is None:
return []  # return an empty list so callers can safely extend or serialize it
remote_path = _get_remote_path(pyref)
artifacts = []

dl_folder = Path(artifacts_folder).parents[0] / "d"
file_list = list(dl_folder.glob("*"))
for f in file_list:
if not f.is_file():
continue # FIXME: This is discarding metadata folders
md5, sha1, sha256 = _get_hashes(f)

artifact_info = {"type": os.path.splitext(f.name)[1].lstrip('.'),
"sha256": sha256,
"sha1": sha1,
"md5": md5}

if not is_dependency:
artifact_info.update({"name": f.name, "path": f'{self._repository}/{remote_path}/{f.name}'})
else:
artifact_info.update({"id": f"{pyref} :: {f.name}"})

artifacts.append(artifact_info)
return artifacts

def create_pyreq_modules(self, node):
python_requires = node.get("python_requires")
pyreq_modules = []
for pyref, pyreq in python_requires.items():

artifacts = self.get_pyreq_artifacts(pyreq, pyref)

module = {"type": "conan",
"id": pyref,
"artifacts": artifacts}
pyreq_modules.append(module)
return pyreq_modules

def create_module(self, node, module_type, transitive_dependencies=None, python_requires=None):
if module_type == "recipe" or (node.get("package_id") and node.get("prev") and module_type == "package"):
ref = node.get("ref")
module = {
"type": "conan",
"id": str(ref) if module_type == "recipe" else f'{str(ref)}:{node.get("package_id")}#{node.get("prev")}',
"artifacts": self.get_artifacts(node, module_type)
}

if transitive_dependencies or python_requires:
nodes = self._graph["graph"]["nodes"]
all_dependencies = []

if transitive_dependencies:
for require_id in transitive_dependencies:
deps_artifacts = self.get_artifacts(nodes.get(require_id), module_type, is_dependency=True)
all_dependencies.extend(deps_artifacts)

if python_requires:
for pyref, pyreq in python_requires.items():
pyreq_artifacts = self.get_pyreq_artifacts(pyreq, pyref, is_dependency=True)
all_dependencies.extend(pyreq_artifacts)

module.update({"dependencies": all_dependencies})

return module


def get_modules(self):
ret = []
modules_list = []
try:
nodes = self._graph["graph"]["nodes"]
except KeyError:
raise ConanException("JSON does not contain graph information")
except KeyError as e:
raise ConanException(f"JSON does not contain graph information: {e}")

for id, node in nodes.items():
for node in nodes.values():
ref = node.get("ref")
if ref:
transitive_dependencies = node.get("dependencies").keys() if node.get("dependencies").keys() else []

# only add the nodes that were marked as built
if node.get("binary") == "Build":

# recipe module
module = {
"type": "conan",
"id": str(ref),
"artifacts": self.get_artifacts(node, "recipe")
}

if self._with_dependencies:
all_dependencies = []
for require_id in transitive_dependencies:
deps_artifacts = self.get_artifacts(nodes.get(require_id), "recipe",
is_dependency=True)
all_dependencies.extend(deps_artifacts)

module.update({"dependencies": all_dependencies})

ret.append(module)

# package module
if node.get("package_id") and node.get("prev"):
module = {
"type": "conan",
"id": f'{str(ref)}:{node.get("package_id")}#{node.get("prev")}',
"artifacts": self.get_artifacts(node, "package")
}
# get the dependencies and its artifacts
if self._with_dependencies:
all_dependencies = []
for require_id in transitive_dependencies:
deps_artifacts = self.get_artifacts(nodes.get(require_id), "package",
is_dependency=True)
all_dependencies.extend(deps_artifacts)

module.update({"dependencies": all_dependencies})

ret.append(module)

return ret
binary = node.get("binary")
dependencies = node.get("dependencies", {})

if ref and (binary == "Build" or (binary == "Cache" and self._add_cached_deps)):
transitive_dependencies = list(dependencies.keys()) if self._with_dependencies else None

# If the package declares python_requires, add each of them as a separate module

python_requires = node.get("python_requires")
if python_requires:
modules = self.create_pyreq_modules(node)
modules_list.extend(modules)

# For each selected package, create a recipe module and a package module
recipe_module = self.create_module(node, "recipe", transitive_dependencies, python_requires)
modules_list.append(recipe_module)

package_module = self.create_module(node, "package", transitive_dependencies)
if package_module:  # skip when no package module could be created (missing package_id/prev)
    modules_list.append(package_module)

return modules_list
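
# Roughly, for a node pkg/0.1 that was built and declares a python_requires pytool/0.1,
# the returned modules_list is expected to look like the sketch below (ids are
# illustrative, "artifacts" and "dependencies" contents elided):
#
# [
#     {"type": "conan", "id": "pytool/0.1#<rrev>", "artifacts": [...]},
#     {"type": "conan", "id": "pkg/0.1#<rrev>", "artifacts": [...], "dependencies": [...]},
#     {"type": "conan", "id": "pkg/0.1#<rrev>:<package_id>#<prev>", "artifacts": [...],
#      "dependencies": [...]},
# ]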

def header(self):
return {"version": "1.0.1",
Expand Down Expand Up @@ -360,6 +409,10 @@ def build_info_create(conan_api: ConanAPI, parser, subparser, *args):
subparser.add_argument("--with-dependencies", help="Whether to add dependencies information or not. Default: false.",
action='store_true', default=False)

subparser.add_argument("--add-cached-deps", help="Add not only the Conan packages that were built, "
"but also the ones used from the cache without being built. Default: false.",
action='store_true', default=False)

args = parser.parse_args(*args)

url, user, password = get_url_user_password(args)
@@ -370,7 +423,8 @@ def build_info_create(conan_api: ConanAPI, parser, subparser, *args):
# remove the 'conanfile' node
data["graph"]["nodes"].pop("0")
bi = _BuildInfo(data, args.build_name, args.build_number, args.repository,
with_dependencies=args.with_dependencies, url=url, user=user, password=password)
with_dependencies=args.with_dependencies,
add_cached_deps=args.add_cached_deps, url=url, user=user, password=password)

cli_out_write(bi.create())
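
For reference, a minimal sketch of how the new flag feeds into _BuildInfo, assuming graph.json was produced by "conan install . --format=json" and that the build name, number and repository are placeholders (on the command line this corresponds to the conan art:build-info create ... --add-cached-deps invocations used in the tests below):

import json

# Sketch only: mirrors what build_info_create does, assuming _BuildInfo from this
# module is importable and "graph.json" holds the dependency graph in JSON format.
with open("graph.json") as f:
    data = json.load(f)
data["graph"]["nodes"].pop("0")  # drop the 'conanfile' consumer node, as the command does

# add_cached_deps=True also includes packages taken from the cache ("Cache" binaries),
# not only the ones that were built ("Build" binaries).
bi = _BuildInfo(data, "mybuild", "1", "extensions-stg",
                with_dependencies=True, add_cached_deps=True)
print(bi.create())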

tests/test_artifactory_commands.py: 81 additions & 1 deletion
@@ -1,7 +1,11 @@
import json
import os
import tempfile
import textwrap

from tools import run
from tools import run, save
from conan.tools.scm import Version
from conan import conan_version

import pytest

@@ -198,6 +202,82 @@ def test_build_info_create_deps():
run('conan remove "*" -c -r extensions-stg')


@pytest.mark.requires_credentials
def test_build_info_create_from_cached_deps():
# Make sure artifactory repos are empty before starting the test
run("conan remove mypkg* -c -r extensions-stg")
run("conan remove mypkg* -c -r extensions-prod")

run(f'conan art:server add artifactory {os.getenv("ART_URL")} --user="{os.getenv("CONAN_LOGIN_USERNAME_EXTENSIONS_STG")}" --password="{os.getenv("CONAN_PASSWORD_EXTENSIONS_STG")}"')

# Create dependency packages and upload them
run("conan new cmake_lib -d name=libc -d version=1.0 --force")
run("conan create . -tf=''")
run("conan new cmake_lib -d name=liba -d version=1.0 -d requires=libc/1.0 --force")
run("conan create . -tf=''")

run("conan upload '*' --dry-run -c -r extensions-stg")

# libc node in graph is cached
run("conan install . --format json > install_release.json")

run(f'conan art:build-info create install_release.json bi_release 1 extensions-stg --server artifactory --with-dependencies > bi_release.json')

with open("bi_release.json", "r") as file:
build_info = json.load(file)

assert len(build_info.get("modules")) == 0

run(f'conan art:build-info create install_release.json bi_release 1 extensions-stg --server artifactory --with-dependencies --add-cached-deps > bi_release.json')

with open("bi_release.json", "r") as file:
build_info = json.load(file)

assert len(build_info.get("modules")) == 2


@pytest.mark.requires_credentials
@pytest.mark.skipif(conan_version <= Version("2.0.13"), reason="path key is only added to python requires in graph for conan >= 2.0.14")
def test_build_info_create_python_requires():
build_name = "mybuildinfo"
build_number = "1"

run(f'conan art:server add artifactory {os.getenv("ART_URL")} --user="{os.getenv("CONAN_LOGIN_USERNAME_EXTENSIONS_STG")}" --password="{os.getenv("CONAN_PASSWORD_EXTENSIONS_STG")}"')

# Create dependency packages and upload them
pytool = textwrap.dedent("""\
from conan import ConanFile
class PyTool(ConanFile):
name = "pytool"
version = "0.1"
""")
save("conanfile.py", pytool)
run('conan create . ')
pkg = textwrap.dedent("""\
from conan import ConanFile
class PyTool(ConanFile):
name = "pkg"
version = "0.1"
python_requires = "pytool/0.1"
""")
save("conanfile.py", pkg)
run('conan create . --format=json > create_release.json')

run("conan upload '*' -c --dry-run -r=extensions-stg")
run(f'conan art:build-info create create_release.json {build_name}_release {build_number} extensions-stg --server artifactory --with-dependencies > {build_name}_release.json')
with open(f"{build_name}_release.json") as f:
    build_info = json.load(f)

assert "pytool/0.1#" in build_info["modules"][0]["id"]
artifacts = build_info["modules"][0]["artifacts"]
assert len(artifacts) == 2

artifact_names = [artifact["name"] for artifact in artifacts]

assert "conanfile.py" in artifact_names
assert "conanmanifest.txt" in artifact_names
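
# Each module artifact entry is expected to look roughly like the following, where
# <remote_path> is whatever _get_remote_path() computes for the reference and the
# hash values are illustrative:
#
#   {"type": "py", "name": "conanfile.py",
#    "path": "extensions-stg/<remote_path>/conanfile.py",
#    "md5": "...", "sha1": "...", "sha256": "..."}
#
# When the same files appear as dependencies of another module, the entries carry an
# "id" of the form "pytool/0.1#<rrev> :: conanfile.py" instead of "name"/"path".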


@pytest.mark.requires_credentials
def test_fail_if_not_uploaded():
"""