Support proto_compiled_sources with ts_project #378

Open
wants to merge 10 commits into base: master
17 changes: 11 additions & 6 deletions cmd/gencopy/gencopy.bash.in
@@ -11,7 +11,12 @@ CONFIG_SHORT_PATH=@@CONFIG_SHORT_PATH@@

# env
# set -x
# echo "PWD: $PWD"
# find .
# find ..

# Get the directory of the current script (part of the runfiles tree)
script_dir=$(dirname "$0")

# find_runfile prints the location of a runfile in the source workspace,
# either by reading the symbolic link or reading the runfiles manifest.
@@ -45,18 +50,18 @@ fi
# Note that we don't change directories first; if we did, Generator wouldn't be
# able to find runfiles, and some extensions rely on that. Generator can use
# BUILD_WORKSPACE_DIRECTORY to interpret relative paths on the command line.
GENCOPY_short_path=$(find_runfile "$GENCOPY_SHORT_PATH")
if [ -z "$GENCOPY_short_path" ]; then
GENCOPY_path=$(find_runfile "$GENCOPY_SHORT_PATH")
if [ -z "$GENCOPY_path" ]; then
echo "error: could not locate gencopy binary" >&2
exit 1
fi

CONFIG_short_path=$(find_runfile "$CONFIG_SHORT_PATH")
if [ -z "$CONFIG_short_path" ]; then
CONFIG_path=$(find_runfile "$CONFIG_SHORT_PATH")
if [ -z "$CONFIG_path" ]; then
echo "error: could not locate gencopy configuration file" >&2
exit 1
fi

"$GENCOPY_short_path" \
-config="$CONFIG_short_path" \
"$GENCOPY_path" \
-config="$CONFIG_path" \
-workspace_root_directory="${BUILD_WORKSPACE_DIRECTORY:-""}"
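
Note for readers tracing the script: find_runfile resolves a runfile either directly inside the runfiles tree next to the script or by consulting the runfiles MANIFEST. The Go sketch below illustrates the same lookup under those assumptions (the manifest location, the "short path <space> absolute path" manifest format, and the example short path are illustrative, not taken from this repo's shell code):

package main

import (
    "bufio"
    "fmt"
    "os"
    "path/filepath"
    "strings"
)

// findRunfile sketches the lookup performed by gencopy.bash.in: try the
// runfiles tree next to the script first, then fall back to scanning the
// runfiles MANIFEST, whose lines map a short path to an absolute path.
func findRunfile(scriptDir, shortPath string) (string, error) {
    direct := filepath.Join(scriptDir, shortPath)
    if _, err := os.Stat(direct); err == nil {
        return direct, nil
    }
    manifest := filepath.Join(scriptDir, "MANIFEST") // assumed manifest location
    f, err := os.Open(manifest)
    if err != nil {
        return "", fmt.Errorf("runfile %s not found and no manifest: %w", shortPath, err)
    }
    defer f.Close()
    scanner := bufio.NewScanner(f)
    for scanner.Scan() {
        fields := strings.SplitN(scanner.Text(), " ", 2)
        if len(fields) == 2 && fields[0] == shortPath {
            return fields[1], nil
        }
    }
    return "", fmt.Errorf("runfile %s not listed in manifest", shortPath)
}

func main() {
    scriptDir := filepath.Dir(os.Args[0])
    if p, err := findRunfile(scriptDir, "cmd/gencopy/gencopy"); err == nil {
        fmt.Println("gencopy binary:", p)
    }
}
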
6 changes: 5 additions & 1 deletion cmd/gencopy/gencopy.bzl
@@ -19,6 +19,10 @@ gencopy_attrs = {
doc = "The label.name used to regenerate targets",
mandatory = True,
),
"extension": attr.string(
doc = "optional file extension to add to the copied file",
mandatory = False,
),
"_gencopy_script": attr.label(
doc = "The gencopy script template",
default = str(Label("//cmd/gencopy:gencopy.bash.in")),
@@ -43,7 +47,7 @@ def gencopy_config(ctx):
)

def gencopy_action(ctx, config, runfiles):
"""gencopy_action declared a bazel action that runs the gencopy.bash script.
"""gencopy_action declares a bazel action that runs the gencopy.bash script.

Args:
ctx: the context object.
2 changes: 0 additions & 2 deletions cmd/gencopy/gencopy.go
@@ -11,7 +11,6 @@ import (
"os"
"path/filepath"
"strconv"
"strings"

"github.com/google/go-cmp/cmp"
)
@@ -174,7 +173,6 @@ func makePkgSrcDstPairs(cfg *Config, pkg *PackageConfig) []*SrcDst {

func makePkgSrcDstPair(cfg *Config, pkg *PackageConfig, src, dst string) *SrcDst {
if pkg.TargetWorkspaceRoot != "" {
src = filepath.Join("external", strings.TrimPrefix(src, ".."))
dst = filepath.Join(pkg.TargetWorkspaceRoot, dst)
}
dst = filepath.Join(cfg.WorkspaceRootDirectory, dst)
2 changes: 1 addition & 1 deletion cmd/gencopy/gencopy_test.go
@@ -68,7 +68,7 @@ func TestMakePkgSrcDstPair(t *testing.T) {
pkg: PackageConfig{TargetWorkspaceRoot: "external/foo"},
src: "../foo/file.txt",
dst: "file.txt",
want: SrcDst{Src: "external/foo/file.txt", Dst: "/home/external/foo/file.txt"},
want: SrcDst{Src: "../foo/file.txt", Dst: "/home/external/foo/file.txt"},
},
} {
t.Run(name, func(t *testing.T) {
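
To make the behavior change concrete: gencopy now leaves the runfiles-relative src untouched and only rewrites the destination under the target workspace root. A minimal standalone Go sketch (struct fields trimmed down from cmd/gencopy/gencopy.go; the real types carry more state) that reproduces the updated test expectation:

package main

import (
    "fmt"
    "path/filepath"
)

// Trimmed-down stand-ins for the gencopy configuration types.
type Config struct{ WorkspaceRootDirectory string }
type PackageConfig struct{ TargetWorkspaceRoot string }
type SrcDst struct{ Src, Dst string }

// makePkgSrcDstPair shows the post-change pairing: the source path is used
// as-is (no "external/" rewriting), while the destination is anchored under
// the target workspace root and the workspace root directory on disk.
func makePkgSrcDstPair(cfg *Config, pkg *PackageConfig, src, dst string) *SrcDst {
    if pkg.TargetWorkspaceRoot != "" {
        dst = filepath.Join(pkg.TargetWorkspaceRoot, dst)
    }
    dst = filepath.Join(cfg.WorkspaceRootDirectory, dst)
    return &SrcDst{Src: src, Dst: dst}
}

func main() {
    got := makePkgSrcDstPair(
        &Config{WorkspaceRootDirectory: "/home"},
        &PackageConfig{TargetWorkspaceRoot: "external/foo"},
        "../foo/file.txt", "file.txt",
    )
    // Matches the updated expectation in gencopy_test.go:
    // {Src: "../foo/file.txt", Dst: "/home/external/foo/file.txt"}
    fmt.Printf("%+v\n", *got)
}
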
1 change: 1 addition & 0 deletions pkg/BUILD.bazel
@@ -15,6 +15,7 @@ filegroup(
"//pkg/plugin/grpc/grpcweb:all_files",
"//pkg/plugin/grpcecosystem/grpcgateway:all_files",
"//pkg/plugin/scalapb/scalapb:all_files",
"//pkg/plugin/scalapb/zio_grpc:all_files",
"//pkg/plugin/stackb/grpc_js:all_files",
"//pkg/plugin/stephenh/ts-proto:all_files",
"//pkg/plugintest:all_files",
3 changes: 3 additions & 0 deletions pkg/protoc/proto_compiled_sources.go
@@ -44,6 +44,9 @@ func (s *protoCompiledSources) LoadInfo() rule.LoadInfo {

// ProvideRule implements part of the LanguageRule interface.
func (s *protoCompiledSources) ProvideRule(cfg *LanguageRuleConfig, config *ProtocConfiguration) RuleProvider {
if len(config.Outputs) == 0 {
return nil
}
return &protoCompileRule{
kind: "proto_compiled_sources",
nameSuffix: "compiled_sources",
3 changes: 3 additions & 0 deletions rules/private/proto_repository_tools_srcs.bzl
@@ -100,6 +100,8 @@ PROTO_REPOSITORY_TOOLS_SRCS = [
"@build_stack_rules_proto//pkg/plugin/grpcecosystem/grpcgateway:protoc-gen-grpc-gateway.go",
"@build_stack_rules_proto//pkg/plugin/scalapb/scalapb:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/scalapb/scalapb:protoc_gen_scala.go",
"@build_stack_rules_proto//pkg/plugin/scalapb/zio_grpc:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/scalapb/zio_grpc:protoc_gen_zio_grpc.go",
"@build_stack_rules_proto//pkg/plugin/stackb/grpc_js:BUILD.bazel",
"@build_stack_rules_proto//pkg/plugin/stackb/grpc_js:protoc-gen-grpc-js.go",
"@build_stack_rules_proto//pkg/plugin/stephenh/ts-proto:BUILD.bazel",
@@ -179,6 +181,7 @@ PROTO_REPOSITORY_TOOLS_SRCS = [
"@build_stack_rules_proto//plugin/grpc/grpc-web:BUILD.bazel",
"@build_stack_rules_proto//plugin/grpc-ecosystem/grpc-gateway:BUILD.bazel",
"@build_stack_rules_proto//plugin/scalapb/scalapb:BUILD.bazel",
"@build_stack_rules_proto//plugin/scalapb/zio-grpc:BUILD.bazel",
"@build_stack_rules_proto//plugin/stackb/grpc_js:BUILD.bazel",
"@build_stack_rules_proto//plugin/stephenh/ts-proto:BUILD.bazel",
"@build_stack_rules_proto//rules:BUILD.bazel",
140 changes: 108 additions & 32 deletions rules/proto_compile.bzl
@@ -94,22 +94,48 @@ def is_windows(ctx):
return ctx.configuration.host_path_separator == ";"

def _proto_compile_impl(ctx):
# mut <list<File>>
outputs = [] + ctx.outputs.outputs

# mut <?string> If defined, we are using the srcs to predict the outputs
# srcgen_ext = None
if len(ctx.attr.srcs) > 0:
if len(ctx.outputs.outputs) > 0:
fail("rule must provide 'srcs' or 'outputs', but not both")

# srcgen_ext = ctx.attr.srcgen_ext
outputs = [ctx.actions.declare_file(name) for name in ctx.attr.srcs]

###
### Part 1: setup variables used in scope
###

# out_dir is used in conjunction with file.short_path to determine root
# output file paths
out_dir = ctx.bin_dir.path
if ctx.label.workspace_root:
out_dir = "/".join([out_dir, ctx.label.workspace_root])

if len(ctx.attr.srcs) > 0 and len(ctx.outputs.outputs) > 0:
fail("rule must provide 'srcs' or 'outputs' (but not both)")

# <dict<string,File>>: output files mapped by their package-relative path.
# This dict is given to the ProtoCompileInfo provider.
output_files_by_rel_path = {}

# const <dict<string,string>>. The key is the file basename, the value is the
# short_path of the output file.
output_short_paths_by_basename = {}

# renames is a mapping from the output filename that was produced by the
# plugin to the actual name we want to output.
renames = {}

if len(ctx.attr.srcs):
# assume filenames in srcs are already package-relative
for name in ctx.attr.srcs:
rel = "/".join([ctx.label.package, name])
actual_name = name + ctx.attr.output_file_suffix
if actual_name != name:
renames[rel] = "/".join([ctx.label.package, actual_name])
f = ctx.actions.declare_file(actual_name)
output_files_by_rel_path[rel] = f
output_short_paths_by_basename[name] = rel
else:
for f in ctx.outputs.outputs:
# rel = _get_package_relative_path(ctx.label, f.short_path)
rel = f.short_path
output_files_by_rel_path[rel] = f
output_short_paths_by_basename[f.basename] = rel

# const <bool> verbosity flag
verbose = ctx.attr.verbose

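The map-building above is the heart of the new srcs mode. A hedged Go sketch of the same bookkeeping follows (only the two string-valued indexes; the File objects declared with ctx.actions.declare_file are omitted), using a hypothetical "proto" package and the ".gen" suffix that proto_compiled_sources relies on:

package main

import "fmt"

// buildIndexes mirrors the srcs branch of _proto_compile_impl: each source
// name is keyed by basename to its package-relative path, and a rename entry
// maps the plugin-produced path to the suffixed path that the rule actually
// declares whenever output_file_suffix is non-empty.
func buildIndexes(pkg string, srcs []string, suffix string) (relByBasename, renames map[string]string) {
    relByBasename = map[string]string{}
    renames = map[string]string{}
    for _, name := range srcs {
        rel := pkg + "/" + name
        actual := name + suffix
        if actual != name {
            renames[rel] = pkg + "/" + actual
        }
        relByBasename[name] = rel
    }
    return relByBasename, renames
}

func main() {
    rels, renames := buildIndexes("proto", []string{"foo.ts"}, ".gen")
    fmt.Println(rels)    // map[foo.ts:proto/foo.ts]
    fmt.Println(renames) // map[proto/foo.ts:proto/foo.ts.gen]
}
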
@@ -125,9 +151,6 @@ def _proto_compile_impl(ctx):
# const <dict<string,string>>
outs = {_plugin_label_key(Label(k)): v for k, v in ctx.attr.outs.items()}

# const <dict<string,File>. outputs indexed by basename.
outputs_by_basename = {f.basename: f for f in outputs}

# mut <list<File>> set of descriptors for the compile action
descriptors = proto_info.transitive_descriptor_sets.to_list()

@@ -279,16 +302,16 @@ def _proto_compile_impl(ctx):
# into place
if len(ctx.attr.output_mappings) > 0:
copy_commands = []
out_dir = ctx.bin_dir.path
if ctx.label.workspace_root:
out_dir = "/".join([out_dir, ctx.label.workspace_root])
for mapping in ctx.attr.output_mappings:
basename, _, intermediate_filename = mapping.partition("=")
intermediate_filename = "/".join([out_dir, intermediate_filename])
output = outputs_by_basename.get(basename, None)
if not output:
output_short_path = output_short_paths_by_basename.get(basename)
if not output_short_path:
fail("the mapped file '%s' was not listed in outputs" % basename)
copy_commands.append("cp '{}' '{}'".format(intermediate_filename, output.path))
copy_commands.append("cp '{dir}/{src}' '{dir}/{dst}'".format(
dir = out_dir,
src = intermediate_filename,
dst = output_short_path,
))
copy_script = ctx.actions.declare_file(ctx.label.name + "_copy.sh")
ctx.actions.write(copy_script, "\n".join(copy_commands), is_executable = True)
inputs.append(copy_script)
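
For reference, the output_mappings handling reduces to string munging over "basename=intermediate/path" entries. The Go sketch below mirrors how I read the updated hunk (prefix both sides with the out_dir computed earlier, look up the declared output by basename, fail on unknown basenames); the sample values in main are hypothetical:

package main

import (
    "fmt"
    "strings"
)

// buildCopyCommands emits one cp command per mapping, copying the
// plugin-produced intermediate file onto the declared output path, both
// rooted at the rule's output directory.
func buildCopyCommands(outDir string, mappings []string, relByBasename map[string]string) ([]string, error) {
    var cmds []string
    for _, m := range mappings {
        base, intermediate, ok := strings.Cut(m, "=")
        if !ok {
            return nil, fmt.Errorf("malformed output mapping %q", m)
        }
        rel, found := relByBasename[base]
        if !found {
            return nil, fmt.Errorf("the mapped file %q was not listed in outputs", base)
        }
        cmds = append(cmds, fmt.Sprintf("cp '%s/%s' '%s/%s'", outDir, intermediate, outDir, rel))
    }
    return cmds, nil
}

func main() {
    cmds, _ := buildCopyCommands(
        "bazel-out/k8-fastbuild/bin",
        []string{"foo.ts=some/nested/dir/foo.ts"},
        map[string]string{"foo.ts": "proto/foo.ts"},
    )
    fmt.Println(strings.Join(cmds, "\n"))
}
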
@@ -298,17 +321,53 @@ def _proto_compile_impl(ctx):
if len(mods):
mv_commands = []
for suffix, action in mods.items():
for f in outputs:
if f.short_path.endswith(suffix):
mv_commands.append("awk '%s' %s > %s.tmp" % (action, f.path, f.path))
mv_commands.append("mv %s.tmp %s" % (f.path, f.path))
for output_short_path in output_short_paths_by_basename.values():
if output_short_path.endswith(suffix):
mv_commands.append("awk '{action}' {dir}/{short_path} > {dir}/{short_path}.tmp".format(
action = action,
dir = out_dir,
short_path = output_short_path,
))
mv_commands.append("mv {dir}/{short_path}.tmp {dir}/{short_path}".format(
dir = out_dir,
short_path = output_short_path,
))
mv_script = ctx.actions.declare_file(ctx.label.name + "_mv.sh")
ctx.actions.write(mv_script, "\n".join(mv_commands), is_executable = True)
inputs.append(mv_script)
commands.append(mv_script.path)

# If ctx.attr.output_file_suffix was set in conjunction with
# ctx.attr.srcs, we rename all the output files to carry that suffix
# (e.g. foo.ts -> foo.ts.gen), relocating the files that were
# generated by the protoc plugins to different names. This is used by the
# 'proto_compiled_sources' rule. The reason is that if we also have a
# `foo.ts` source file sitting in the workspace (checked into git), rules
# like `ts_project` will perform a 'copy_to_bin' action on the file. If we
# didn't do this rename, the ts_project rule and the proto_compile rule
# would attempt to create the same output file in bazel-bin (foo.ts),
# causing an error.
#
# In the case of proto_compiled_sources, running `bazel run
# //proto:foo_ts.update` generates the file
# `bazel-bin/proto/foo.ts.gen`, and the gencopy operation copies that file
# to `WORKSPACE/proto/foo.ts`, effectively treating the `.gen` file as a
# temporary artifact.
if len(renames):
rename_commands = []
for src, dst in renames.items():
rename_commands.append("mv {dir}/{src} {dir}/{dst}".format(
dir = out_dir,
src = src,
dst = dst,
))
rename_script = ctx.actions.declare_file(ctx.label.name + "_rename.sh")
ctx.actions.write(rename_script, "\n".join(rename_commands), is_executable = True)
inputs.append(rename_script)
commands.append(rename_script.path)

if verbose:
before = ["env", "pwd", "ls -al .", "echo '\n##### SANDBOX BEFORE RUNNING PROTOC'", "find * -type l"]
before = ["env", "pwd", "ls -al .", "echo '\n##### SANDBOX BEFORE RUNNING PROTOC'", "find * -type l | grep -v node_modules"]
after = ["echo '\n##### SANDBOX AFTER RUNNING PROTOC'", "find * -type f"]
commands = before + commands + after

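The comment block above describes the full round trip: protoc writes proto/foo.ts into bazel-bin, the rename script moves it to proto/foo.ts.gen so it cannot collide with ts_project's copy_to_bin output, and `bazel run //proto:foo_ts.update` copies the .gen file back into the source tree without the suffix. Below is a small Go sketch of that final copy-back step only, under the paths and suffix used in the comment (the real implementation is cmd/gencopy, driven by its generated configuration):

package main

import (
    "fmt"
    "io"
    "os"
    "path/filepath"
    "strings"
)

// copyGenerated copies a generated file such as bazel-bin/proto/foo.ts.gen
// back into the source workspace as proto/foo.ts, stripping the temporary
// suffix on the way out.
func copyGenerated(binDir, workspaceRoot, rel, suffix string) error {
    src := filepath.Join(binDir, rel)
    dst := filepath.Join(workspaceRoot, strings.TrimSuffix(rel, suffix))
    if err := os.MkdirAll(filepath.Dir(dst), 0o755); err != nil {
        return err
    }
    in, err := os.Open(src)
    if err != nil {
        return err
    }
    defer in.Close()
    out, err := os.Create(dst)
    if err != nil {
        return err
    }
    defer out.Close()
    _, err = io.Copy(out, in)
    return err
}

func main() {
    // e.g. bazel-bin/proto/foo.ts.gen -> $BUILD_WORKSPACE_DIRECTORY/proto/foo.ts
    err := copyGenerated("bazel-bin", os.Getenv("BUILD_WORKSPACE_DIRECTORY"), "proto/foo.ts.gen", ".gen")
    if err != nil {
        fmt.Fprintln(os.Stderr, "copy failed:", err)
    }
}
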
@@ -327,7 +386,7 @@ def _proto_compile_impl(ctx):
for f in inputs:
# buildifier: disable=print
print("INPUT:", f.path)
for f in outputs:
for f in output_files_by_rel_path.values():
# buildifier: disable=print
print("EXPECTED OUTPUT:", f.path)

@@ -336,17 +395,26 @@ def _proto_compile_impl(ctx):
command = "\n".join(commands),
inputs = inputs,
mnemonic = "Protoc",
outputs = outputs,
outputs = output_files_by_rel_path.values(),
progress_message = "Compiling protoc outputs for %r" % [f.basename for f in protos],
tools = tools,
input_manifests = input_manifests,
env = {"BAZEL_BINDIR": ctx.bin_dir.path},
)

return [
ProtoCompileInfo(label = ctx.label, outputs = outputs),
DefaultInfo(files = depset(outputs)),
outputs = output_files_by_rel_path.values()

providers = [
ProtoCompileInfo(
label = ctx.label,
outputs = outputs,
output_files_by_rel_path = output_files_by_rel_path,
),
]
if ctx.attr.default_info:
providers.append(DefaultInfo(files = depset(outputs)))

return providers

proto_compile = rule(
implementation = _proto_compile_impl,
@@ -387,6 +455,14 @@ proto_compile = rule(
),
"verbose": attr.bool(
doc = "The verbosity flag.",
default = False,
),
"default_info": attr.bool(
doc = "If false, do not return the DefaultInfo provider",
default = True,
),
"output_file_suffix": attr.string(
doc = "If set, copy the output files to a new set having this suffix",
),
},
toolchains = ["@build_stack_rules_proto//toolchain:protoc"],