# Patch parsing functions

FIND_INFRA_IN_PATCH = re.compile(r"^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")

def analyze_patch(patch):
    """Parse one patch and return the set of files modified, added or
    removed by the patch, together with the set of package
    infrastructures it references."""
    files = set()
    infras = set()
    for line in patch:
        # If the patch is adding a package, find which infra it uses
        m = FIND_INFRA_IN_PATCH.match(line)
        if m:
            infras.add(m.group(2))
        # Only "+++ " lines tell us which files the patch touches
        if not line.startswith("+++ "):
            continue
        # Drop the leading "a/" or "b/" component of the path
        fname = line[line.find("/") + 1:].strip()
        if fname == "dev/null":
            continue
        files.add(fname)
    return (files, infras)
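
# Illustrative note, not part of the original script: "patch" is expected to
# be an iterable of unified-diff lines. A hunk such as
#     +++ b/package/foo/foo.mk
#     +$(eval $(generic-package))
# yields the file name "package/foo/foo.mk" (the leading "a/" or "b/"
# component is dropped by the find("/") slice) and the infrastructure
# "generic" (group 2 of FIND_INFRA_IN_PATCH). The package name is hypothetical.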

FIND_INFRA_IN_MK = re.compile(r"^\$\(eval \$\((host-)?([^-]*)-package\)\)$")

def fname_get_package_infra(fname):
    """Check whether the file name passed as argument is a Buildroot .mk
    file describing a package and, if so, return the name of the
    infrastructure it uses."""
    if not fname.endswith(".mk"):
        return None
    if not os.path.exists(fname):
        return None
    with open(fname, "r") as f:
        for l in f:
            l = l.strip()
            m = FIND_INFRA_IN_MK.match(l)
            if m:
                return m.group(2)
    return None
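
# Illustrative note, an assumption about typical package .mk contents: such a
# file ends with a line like
#     $(eval $(autotools-package))
# or
#     $(eval $(host-generic-package))
# for which fname_get_package_infra() returns "autotools" or "generic".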

def get_infras(files):
    """Search the list of files for .mk files, and collect the package
    infrastructures used by those .mk files."""
    infras = set()
    for fname in files:
        infra = fname_get_package_infra(fname)
        if infra:
            infras.add(infra)
    return infras

def analyze_patches(patches):
    """Parse a list of patches and return the set of files modified,
    added or removed by the patches, as well as the set of package
    infrastructures used by those patches (if any)."""
    allfiles = set()
    allinfras = set()
    for patch in patches:
        (files, infras) = analyze_patch(patch)
        allfiles = allfiles | files
        allinfras = allinfras | infras
    allinfras = allinfras | get_infras(allfiles)
    return (allfiles, allinfras)
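
# Hedged usage sketch, not part of the original script: one way the patch
# analysis above might be driven from patch files on disk. The helper name
# and the patch paths are hypothetical.
def _example_analyze_patch_files(patch_paths):
    patches = []
    for path in patch_paths:
        with open(path, "r") as f:
            patches.append(f.readlines())
    return analyze_patches(patches)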

# DEVELOPERS file parsing functions

class Developer:
    def __init__(self, name, files):
        self.name = name
        self.files = files
        self.packages = parse_developer_packages(files)
        self.architectures = parse_developer_architectures(files)
        self.infras = parse_developer_infras(files)

    def hasfile(self, f):
        """Return True if the file 'f' is handled by this developer."""
        f = os.path.abspath(f)
        for fs in self.files:
            if f.startswith(fs):
                return True
        return False

def parse_developer_packages(fnames):
    """Given a list of file patterns, walk through the Buildroot source
    tree to find which packages are implemented by those file
    patterns, and return the set of those packages."""
    packages = set()
    for fname in fnames:
        for root, dirs, files in os.walk(fname):
            for f in files:
                path = os.path.join(root, f)
                if fname_get_package_infra(path):
                    pkg = os.path.splitext(f)[0]
                    packages.add(pkg)
    return packages

def parse_arches_from_config_in(fname):
    """Given a path to an arch/Config.in.* file, parse it to get the set
    of BR2_ARCH values for this architecture."""
    arches = set()
    with open(fname, "r") as f:
        parsing_arches = False
        for l in f:
            l = l.strip()
            if l == "config BR2_ARCH":
                parsing_arches = True
                continue
            if parsing_arches:
                m = re.match(r"^\s*default \"([^\"]*)\".*", l)
                if m:
                    arches.add(m.group(1))
                else:
                    parsing_arches = False
    return arches
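
# Illustrative note, an assumption about arch/Config.in.* contents: the parser
# above matches snippets of the form
#     config BR2_ARCH
#             default "arm"   if BR2_arm
#             default "armeb" if BR2_armeb
# and collects the quoted default values ("arm", "armeb", ...).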

def parse_developer_architectures(fnames):
    """Given a list of file names, find the ones matching
    'arch/Config.in.*', and use them to determine the architectures a
    developer is working on."""
    arches = set()
    for fname in fnames:
        if not re.match(r"^.*/arch/Config\.in\..*$", fname):
            continue
        arches = arches | parse_arches_from_config_in(fname)
    return arches

def parse_developer_infras(fnames):
    """Given a list of file names, collect the package infrastructures
    (package/pkg-*.mk files) maintained by the developer."""
    infras = set()
    for fname in fnames:
        m = re.match(r"^package/pkg-([^.]*)\.mk$", fname)
        if m:
            infras.add(m.group(1))
    return infras

def parse_developers(basepath=None):
    """Parse the DEVELOPERS file and return a list of Developer objects."""
    developers = []
    linen = 0
    if basepath is None:
        basepath = os.getcwd()
    with open(os.path.join(basepath, "DEVELOPERS"), "r") as f:
        files = []
        name = None
        for l in f:
            l = l.strip()
            if l.startswith("#"):
                continue
            elif l.startswith("N:"):
                if name is not None or len(files) != 0:
                    print("Syntax error in DEVELOPERS file, line %d" % linen)
                name = l[2:].strip()
            elif l.startswith("F:"):
                fname = l[2:].strip()
                dev_files = glob.glob(os.path.join(basepath, fname))
                if len(dev_files) == 0:
                    print("WARNING: '%s' doesn't match any file" % fname)
                files += dev_files
            elif l == "":
                if not name:
                    continue
                developers.append(Developer(name, files))
                files = []
                name = None
            else:
                print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, l))
                return None
            linen += 1
    # handle the last developer entry, which may not be followed by a blank line
    if name is not None:
        developers.append(Developer(name, files))
    return developers
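
# Illustrative note, an assumption about the DEVELOPERS file format implied by
# the parser above: entries are blocks separated by blank lines, e.g.
#     N: Jane Doe <jane@example.org>
#     F: package/foo/
#     F: arch/Config.in.arm
# "N:" introduces the developer's name and each "F:" a file or glob pattern
# relative to the top of the Buildroot tree (the name and paths shown here
# are hypothetical).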

def check_developers(developers, basepath=None):
    """Look at the list of files versioned in Buildroot, and return the
    list of files that are not handled by any developer."""
    if basepath is None:
        basepath = os.getcwd()
    cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
    files = subprocess.check_output(cmd).strip().split("\n")
    unhandled_files = []
    for f in files:
        handled = False
        for d in developers:
            if d.hasfile(os.path.join(basepath, f)):
                handled = True
                break
        if not handled:
            unhandled_files.append(f)
    return unhandled_files
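
# Hedged usage sketch, not part of the original library: report files that no
# developer handles when the module is run from the top of a Buildroot tree.
if __name__ == "__main__":
    devs = parse_developers()
    if devs is not None:
        for path in check_developers(devs):
            print("No developer handles '%s'" % path)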