forked from bluerobotics/BlueOS-Extensions-Repository
/
consolidate.py
executable file
·261 lines (223 loc) · 9.72 KB
/
consolidate.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
#!/usr/bin/env python3
import asyncio
import dataclasses
import json
from enum import Enum
from pathlib import Path
from typing import Any, AsyncIterable, Dict, List, Optional, Union
import aiohttp
import json5
import semver
from registry import Registry
REPO_ROOT = "https://raw.githubusercontent.com/bluerobotics/BlueOS-Extensions-Repository/master/repos"
class StrEnum(str, Enum):
    """Stand-in for :class:`enum.StrEnum` until Python 3.11 is available.

    Members behave as plain strings, and ``str()`` yields the raw value
    instead of the default ``ClassName.MEMBER`` rendering.
    """

    def __str__(self) -> str:
        # A str-mixin enum member *is* its value, so delegating to the str
        # base gives the same text as ``self.value`` without the Any-typed
        # attribute lookup (and thus no type-ignore needed).
        return str.__str__(self)
class EnhancedJSONEncoder(json.JSONEncoder):
    """
    Custom json encoder for dataclasses,
    see https://docs.python.org/3/library/json.html#json.JSONEncoder.default
    Returns a serializable type
    """

    def default(self, o: Any) -> Union[Any, Dict[str, Any]]:
        # Anything that is not a dataclass instance goes to the stock
        # encoder, which raises TypeError for unsupported types.
        if not dataclasses.is_dataclass(o):
            return super().default(o)
        # Recursively convert the dataclass (and nested ones) to a dict.
        return dataclasses.asdict(o)
@dataclasses.dataclass
class Author:
    """An individual credited with maintaining an extension."""

    name: str
    email: str

    @staticmethod
    def from_json(json_dict: Dict[str, str]) -> "Author":
        """Build an Author from a decoded JSON object; both ``name`` and
        ``email`` keys are required (KeyError if absent)."""
        return Author(json_dict["name"], json_dict["email"])
@dataclasses.dataclass
class Platform:
    """Docker platform descriptor for one image (architecture/variant/os)."""

    architecture: str
    variant: Optional[str] = None
    # pylint: disable=invalid-name
    # "os" shadows the stdlib module name, hence the pylint waiver above.
    os: Optional[str] = None
@dataclasses.dataclass
class Image:
    """A single docker image entry attached to a tag."""

    # Content-addressable digest, when the registry reported one.
    digest: Optional[str]
    # Size as reported by the registry's "size" field — presumably bytes;
    # confirm whether it is compressed or expanded against the registry API.
    expanded_size: int
    platform: Platform
@dataclasses.dataclass
class Company:
    """The organization behind an extension."""

    name: str
    about: Optional[str]
    email: Optional[str]

    @staticmethod
    def from_json(json_dict: Dict[str, str]) -> "Company":
        """Build a Company from a decoded JSON object.

        ``name`` is required (KeyError if absent); ``about`` and ``email``
        default to None when missing.
        """
        about = json_dict.get("about")
        email = json_dict.get("email")
        return Company(json_dict["name"], about, email)
class ExtensionType(StrEnum):
    """Category label for an extension; the string values are what appear
    in docker labels and in the consolidated manifest."""

    DEVICE_INTEGRATION = "device-integration"
    EXAMPLE = "example"
    THEME = "theme"
    OTHER = "other"
    TOOL = "tool"
# pylint: disable=too-many-instance-attributes
@dataclasses.dataclass
class Version:
    """All metadata consolidated for one tagged release of an extension."""

    permissions: Optional[Dict[str, Any]]
    requirements: Optional[str]
    tag: Optional[str]
    website: str
    authors: List[Author]
    docs: Optional[str]
    readme: Optional[str]
    company: Optional[Company]
    support: Optional[str]
    type: ExtensionType
    filter_tags: List[str]
    extra_links: Dict[str, str]
    images: List[Image]

    @staticmethod
    def validate_filter_tags(tags: List[str]) -> List[str]:
        """Return at most 10 lower-cased tags; each must be alphanumeric
        once dashes are removed (so empty/punctuated tags are dropped)."""
        accepted = []
        for candidate in tags:
            if candidate.replace("-", "").isalnum():
                accepted.append(candidate.lower())
        return accepted[:10]
@dataclasses.dataclass
class RepositoryEntry:
    """One extension as it appears in the consolidated manifest."""

    # "<company>.<extension>" derived from the repos/ folder layout.
    identifier: str
    name: str
    description: str
    # Docker image name without a tag; tags are fetched per-version.
    docker: str
    website: str
    # Tag name -> Version; sorted highest-version-first by the consolidator.
    versions: Dict[str, Version]
    # Absolute raw.githubusercontent.com URLs (extension logo falls back to
    # the company logo when missing).
    extension_logo: Optional[str]
    company_logo: Optional[str]
class Consolidator:
    """
    Builds ``manifest.json`` for the extensions store: walks every
    ``repos/<company>/<extension>/metadata.json``, fetches docker tags and
    labels for each extension, and consolidates everything into a list of
    :class:`RepositoryEntry` objects.
    """

    def __init__(self) -> None:
        # These were previously *class* attributes, so every instance (and
        # the class itself) shared one Registry and one result list. As
        # mutable state they belong to each instance.
        self.registry = Registry()
        self.consolidated_data: List[RepositoryEntry] = []

    @staticmethod
    def repo_folder() -> Path:
        """Return the local ``repos`` folder (sibling of this script's parent)."""
        return Path(__file__).parent.parent.joinpath("repos")

    @staticmethod
    async def fetch_readme(url: str) -> str:
        """Download the readme at ``url``.

        Returns a placeholder string for non-http urls.

        Raises:
            Exception: on a non-200 status or a content type other than
                ``text/plain`` (raw.githubusercontent serves plain text).
        """
        if not url.startswith("http"):
            print(f"Invalid Readme url: {url}")
            return "Readme not provided."
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                if resp.status != 200:
                    print(f"Error status {resp.status}")
                    raise Exception(f"Could not get readme {url}: status: {resp.status}")
                if resp.content_type != "text/plain":
                    raise Exception(f"bad response type for readme: {resp.content_type}, expected text/plain")
                return await resp.text()

    async def all_repositories(self) -> AsyncIterable[RepositoryEntry]:
        """Yield one version-less :class:`RepositoryEntry` per metadata.json found."""
        repos = self.repo_folder()
        for repo in repos.glob("**/metadata.json"):
            with open(repo, "r", encoding="utf-8") as individual_file:
                # Folder layout is repos/<company>/<extension>/metadata.json.
                company, extension_name = repo.as_posix().split("/")[-3:-1]
                identifier = ".".join([company, extension_name])
                try:
                    data = json5.load(individual_file)
                except Exception as exc:
                    raise Exception(f"Unable to parse file {repo}") from exc
                company_logo = (repo / "../../company_logo.png").resolve().relative_to(repos.resolve())
                extension_logo_file = (repo / "../extension_logo.png").resolve()
                if extension_logo_file.exists():
                    extension_logo = extension_logo_file.resolve().relative_to(repos.resolve())
                else:
                    # Fall back to the company logo when the extension ships none.
                    extension_logo = company_logo
                try:
                    new_repo = RepositoryEntry(
                        identifier=identifier,
                        name=data["name"],
                        docker=data["docker"],
                        description=data["description"],
                        website=data["website"],
                        extension_logo=f"{REPO_ROOT}/{extension_logo}" if extension_logo else None,
                        versions={},
                        company_logo=f"{REPO_ROOT}/{company_logo}" if company_logo else None,
                    )
                    yield new_repo
                except Exception as error:
                    raise Exception(f"unable to read file {repo}: {error}") from error

    @staticmethod
    def valid_semver(string: str) -> Optional[semver.VersionInfo]:
        """Parse ``string`` as SemVer; returns None when it is not valid."""
        # We want to allow versions to be prefixed with a 'v'.
        # This is up for discussion
        if string.startswith("v"):
            string = string[1:]
        try:
            return semver.VersionInfo.parse(string)
        except ValueError:
            return None  # not valid

    def extract_images_from_tag(self, tag: Any) -> List[Image]:
        """Convert the registry's image list for ``tag`` into :class:`Image`s,
        keeping only "active" images with a known architecture and os."""
        # NOTE(review): the filter indexes "status"/"architecture"/"os"
        # directly while Platform below uses .get() — assumes the registry
        # always sends those keys; confirm against Registry's payload.
        active_images = [
            image
            for image in tag["images"]
            if (image["status"] == "active" and image["architecture"] != "unknown" and image["os"] != "unknown")
        ]
        images = [
            Image(
                digest=image.get("digest", None),
                expanded_size=image["size"],
                platform=Platform(
                    architecture=image["architecture"],
                    variant=image.get("variant", None),
                    os=image.get("os", None),
                ),
            )
            for image in active_images
        ]
        return images

    # pylint: disable=too-many-locals
    async def run(self) -> None:
        """Consolidate every repository/tag pair and write ``manifest.json``."""
        async for repository in self.all_repositories():
            for tag in await self.registry.fetch_remote_tags(repository.docker):
                tag_name = tag["name"]
                print(tag_name)
                try:
                    # Only SemVer-like tags become versions; others are skipped.
                    if not self.valid_semver(tag_name):
                        print(f"{tag_name} is not valid SemVer, ignoring it...")
                        continue
                    raw_labels = await self.registry.fetch_labels(f"{repository.docker}:{tag_name}")
                    permissions = raw_labels.get("permissions", None)
                    links = json5.loads(raw_labels.get("links", "{}"))
                    # Prefer entries from the "links" label, falling back to
                    # the legacy top-level labels.
                    website = links.pop("website", raw_labels.get("website", None))
                    authors = json5.loads(raw_labels.get("authors", "[]"))
                    # documentation is just a URL for a link, but the old format had it as its own label
                    docs = links.pop("docs", links.pop("documentation", raw_labels.get("docs", None)))
                    readme = raw_labels.get("readme", None)
                    if readme is not None:
                        # The label may embed a literal "{tag_name}" placeholder.
                        readme = readme.replace(r"{tag_name}", tag_name)
                        try:
                            readme = await self.fetch_readme(readme)
                        except Exception as error:  # pylint: disable=broad-except
                            # Best effort: surface the fetch error as the readme text.
                            readme = str(error)
                    company_raw = raw_labels.get("company", None)
                    company = Company.from_json(json5.loads(company_raw)) if company_raw is not None else None
                    support = links.pop("support", raw_labels.get("support", None))
                    type_ = raw_labels.get("type", ExtensionType.OTHER)
                    filter_tags = json5.loads(raw_labels.get("tags", "[]"))
                    new_version = Version(
                        permissions=json5.loads(permissions) if permissions else None,
                        website=website,
                        authors=authors,
                        # NOTE(review): docs is documented above as "just a URL";
                        # json5-decoding it looks intentional for quoted label
                        # values — confirm against the label format.
                        docs=json5.loads(docs) if docs else None,
                        readme=readme,
                        company=company,
                        support=support,
                        extra_links=links,
                        type=type_,
                        filter_tags=Version.validate_filter_tags(filter_tags),
                        requirements=raw_labels.get("requirements", None),
                        tag=tag_name,
                        images=self.extract_images_from_tag(tag),
                    )
                    repository.versions[tag_name] = new_version
                except KeyError as error:
                    raise Exception(f"unable to parse repository {repository}: {error}") from error
            # sort the versions, with the highest version first
            repository.versions = dict(
                sorted(repository.versions.items(), key=lambda i: self.valid_semver(i[0]), reverse=True)  # type: ignore
            )
            if repository.versions:  # only include if there's at least one valid version
                self.consolidated_data.append(repository)
        with open("manifest.json", "w", encoding="utf-8") as manifest_file:
            manifest_file.write(json.dumps(self.consolidated_data, indent=4, cls=EnhancedJSONEncoder))
if __name__ == "__main__":
    # Guard the entry point so importing this module (e.g. for tests or
    # reuse of the dataclasses) does not kick off the whole consolidation
    # as an import side effect.
    consolidator = Consolidator()
    asyncio.run(consolidator.run())