1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
|
load("//tools:defs.bzl", "go_binary", "pkg_tar")
load("//website:defs.bzl", "docs")
package(licenses = ["notice"])
# website is the full container image. Note that this actually just collects
# other dependencies and runs Docker locally to import and tag the image.
#
# Tagged "local"/"manual" because it shells out to the host Docker daemon,
# so it cannot run in a sandbox, remotely, or as part of wildcard builds.
sh_binary(
    name = "website",
    srcs = ["import.sh"],
    data = [":files"],
    tags = [
        "local",
        "manual",
    ],
)
# files is the full file system of the generated container.
#
# It must collect all the tarballs (produced by the rules below), and run it
# through the Dockerfile to generate the site. Note that this checks all links,
# and therefore requires all static content to be present as well.
#
# Note that this rule violates most aspects of hermetic builds. However, this
# works much more reliably than depending on the container_image rules from
# bazel itself, which are convoluted and seem to have a hard time even finding
# the toolchain.
genrule(
    name = "files",
    srcs = [
        ":config",
        ":docs",
        ":posts",
        ":static",
        ":syscallmd",
        "//website/cmd/server",
    ],
    outs = ["files.tgz"],
    # The command runs in four phases:
    #   1. Unpack every input tarball into a scratch input/ directory.
    #   2. Render the site with the Jekyll image into output/_site.
    #   3. Overlay the purely static files, then re-run the same image to
    #      validate the rendered HTML with htmlproofer.
    #   4. Add the server binary and pack everything into files.tgz.
    cmd = "set -x; " +
          # Start from clean scratch directories.
          "rm -rf $(@D)/input && mkdir -p $(@D)/input && " +
          "rm -rf $(@D)/output && mkdir -p $(@D)/output/_site && " +
          # Phase 1: unpack the Jekyll inputs.
          "tar -xf $(location :config) -C $(@D)/input && " +
          "tar -xf $(location :docs) -C $(@D)/input && " +
          "tar -xf $(location :posts) -C $(@D)/input && " +
          "tar -xf $(location :syscallmd) -C $(@D)/input && " +
          # Extracted files may be read-only; make them writable for Jekyll.
          "find $(@D)/input -type f -exec chmod u+rw {} \\; && " +
          # Phase 2: render the site. Runs as the invoking user (not root)
          # so the generated files remain removable by the build.
          "docker run -i --user $$(id -u):$$(id -g) " +
          "-v $$(readlink -m $(@D)/input):/input " +
          "-v $$(readlink -m $(@D)/output/_site):/output " +
          "gvisor.dev/images/jekyll && " +
          # Phase 3: overlay static content, then check the result. External
          # links are skipped; only internal links and HTML are validated.
          "tar -xf $(location :static) -C $(@D)/output/_site && " +
          "docker run -i --user $$(id -u):$$(id -g) " +
          "-v $$(readlink -m $(@D)/output/_site):/output " +
          "gvisor.dev/images/jekyll " +
          "/usr/gem/bin/htmlproofer " +
          "--disable-external " +
          "--check-html " +
          "/output && " +
          # Phase 4: bundle the serving binary with the site and pack it up.
          "cp $(location //website/cmd/server) $(@D)/output/server && " +
          "tar -zcf $@ -C $(@D)/output . && " +
          "rm -rf $(@D)/input $(@D)/output",
    tags = [
        "local",  # Requires the host Docker daemon.
        "manual",
        "nosandbox",
    ],
)
# static are the purely static parts of the site. These are effectively copied
# in after jekyll generates all the dynamic content.
#
# strip_prefix keeps the paths relative to this package so the files land at
# the root of the tarball (and hence at the root of the site).
pkg_tar(
    name = "static",
    srcs = glob([
        "archive.key",
        "performance/**",
    ]),
    strip_prefix = "./",
)
# config is "mostly" static content. These are parts of the site that are
# present when jekyll runs, but are not dynamically generated.
#
# This includes the Jekyll site configuration (*.yml), templates (_includes,
# _layouts, _plugins, _sass) and top-level assets; strip_prefix keeps the
# paths relative to this package.
pkg_tar(
    name = "config",
    srcs = glob([
        "assets/**",
        "blog/*.html",
        "*.yml",
        "css/**",
        "index.md",
        "_includes/**",
        "_layouts/**",
        "_plugins/**",
        "_sass/**",
    ]),
    strip_prefix = "./",
)
# docs is the dynamic content of the site.
#
# Each dependency is a documentation target collected from across the
# repository (see //website:defs.bzl); the resulting tarball is rendered by
# Jekyll in the :files rule above. Keep this list sorted.
docs(
    name = "docs",
    deps = [
        "//:code_of_conduct",
        "//:contributing",
        "//:governance",
        "//:security",
        "//g3doc:community",
        "//g3doc:index",
        "//g3doc:roadmap",
        "//g3doc/architecture_guide:index",
        "//g3doc/architecture_guide:performance",
        "//g3doc/architecture_guide:platforms",
        "//g3doc/architecture_guide:resources",
        "//g3doc/architecture_guide:security",
        "//g3doc/user_guide:FAQ",
        "//g3doc/user_guide:checkpoint_restore",
        "//g3doc/user_guide:compatibility",
        "//g3doc/user_guide:debugging",
        "//g3doc/user_guide:filesystem",
        "//g3doc/user_guide:install",
        "//g3doc/user_guide:networking",
        "//g3doc/user_guide:platforms",
        "//g3doc/user_guide/quick_start:docker",
        "//g3doc/user_guide/quick_start:kubernetes",
        "//g3doc/user_guide/quick_start:oci",
        "//g3doc/user_guide/tutorials:cni",
        "//g3doc/user_guide/tutorials:docker",
        "//g3doc/user_guide/tutorials:kubernetes",
    ],
)
# posts are moved to the _posts directory.
#
# package_dir relocates the blog markdown under _posts/ inside the tarball,
# which is where Jekyll (run in the :files rule) looks for posts.
pkg_tar(
    name = "posts",
    srcs = glob([
        "blog/*.md",
    ]),
    package_dir = "_posts",
)
# Generate JSON for system call tables.
#
# Runs the runsc binary itself ("help syscalls") so the compatibility data is
# always produced by the current build rather than a checked-in snapshot.
genrule(
    name = "syscalljson",
    outs = ["syscalls.json"],
    cmd = "$(location //runsc) -- help syscalls -format json -filename $@",
    tools = ["//runsc"],
)
# Generate markdown from the JSON dump.
#
# Converts :syscalljson into per-page markdown via generate-syscall-docs,
# then packs the result into a tarball consumed by the :files rule.
genrule(
    name = "syscallmd",
    srcs = [":syscalljson"],
    outs = ["syscallsmd"],
    # Clean and recreate the scratch directory (mirrors the :files rule);
    # a bare "mkdir" would fail if _tmp survived an interrupted prior run,
    # and stale files would otherwise leak into the tarball.
    cmd = "rm -rf $(@D)/_tmp && mkdir -p $(@D)/_tmp && \
           $(location //website/cmd/generate-syscall-docs) -in $< -out $(@D)/_tmp && \
           tar -C $(@D)/_tmp -czf $@ . && \
           rm -rf $(@D)/_tmp",
    tools = ["//website/cmd/generate-syscall-docs"],
)
|