-
Notifications
You must be signed in to change notification settings - Fork 39
/
owlbot.py
152 lines (133 loc) · 5.15 KB
/
owlbot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import pathlib
import re
import textwrap
from synthtool import gcp
import synthtool as s
from synthtool.languages import python
# Absolute path of the repository root (the directory holding this script);
# used below to locate the per-sample noxfiles.
REPO_ROOT = pathlib.Path(__file__).parent.absolute()

common = gcp.CommonTemplates()

# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------

# Cross-project documentation links consumed by Sphinx's intersphinx extension.
_INTERSPHINX_DEPENDENCIES = {
    "pandas": "https://pandas.pydata.org/pandas-docs/stable/",
    "pydata-google-auth": "https://pydata-google-auth.readthedocs.io/en/latest/",
}

# Render the shared python-library templates for this repository.
templated_files = common.py_library(
    unit_test_python_versions=["3.9", "3.10", "3.11", "3.12"],
    system_test_python_versions=["3.9", "3.11", "3.12"],
    cov_level=35,
    intersphinx_dependencies=_INTERSPHINX_DEPENDENCIES,
)
# Files that the shared templates must NOT overwrite in this repository.
_TEMPLATE_EXCLUDES = [
    # Multi-processing note isn't relevant, as bigframes is responsible for
    # creating clients, not the end user.
    "docs/multiprocessing.rst",
    "noxfile.py",
    ".pre-commit-config.yaml",
    "README.rst",
    "CONTRIBUTING.rst",
    ".github/release-trigger.yml",
    # BigQuery DataFrames manages its own Kokoro cluster for presubmit & continuous tests.
    ".kokoro/build.sh",
    ".kokoro/continuous/common.cfg",
    ".kokoro/presubmit/common.cfg",
]

# Copy the rendered templates into the repository, skipping the excludes.
s.move(templated_files, excludes=_TEMPLATE_EXCLUDES)
# ----------------------------------------------------------------------------
# Fixup files
# ----------------------------------------------------------------------------

# Encourage sharing all relevant versions in bug reports: inject a
# ready-to-paste snippet that prints the versions of bigframes and its key
# dependencies, placed immediately before the "Steps to reproduce" heading.
_VERSIONS_SNIPPET = textwrap.dedent(
    """
    ```python
    import sys
    import bigframes
    import google.cloud.bigquery
    import ibis
    import pandas
    import pyarrow
    import sqlglot
    print(f"Python: {sys.version}")
    print(f"bigframes=={bigframes.__version__}")
    print(f"google-cloud-bigquery=={google.cloud.bigquery.__version__}")
    print(f"ibis=={ibis.__version__}")
    print(f"pandas=={pandas.__version__}")
    print(f"pyarrow=={pyarrow.__version__}")
    print(f"sqlglot=={sqlglot.__version__}")
    ```
    #### Steps to reproduce
    """,
)

count = s.replace(  # bug_report.md
    [".github/ISSUE_TEMPLATE/bug_report.md"],
    re.escape("#### Steps to reproduce\n"),
    _VERSIONS_SNIPPET,
)
# Exactly one heading should have been rewritten; anything else means the
# issue template has drifted from what this script expects.
assert count == 1
# Make sure build includes all necessary files: vendor third_party code and
# the bigframes package itself alongside the templated "google" include.
count = s.replace(  # MANIFEST.in
    ["MANIFEST.in"],
    re.escape("recursive-include google"),
    (
        "recursive-include third_party/bigframes_vendored *\n"
        "recursive-include bigframes"
    ),
)
assert count == 1
# Even though BigQuery DataFrames isn't technically a client library, we are
# opting into Cloud RAD for docs hosting, so publish to the production
# staging bucket rather than the dev one.
count = s.replace(  # common.cfg
    [".kokoro/docs/common.cfg"],
    re.escape('value: "docs-staging-v2-dev"'),
    'value: "docs-staging-v2"',
)
assert count == 1
# Use a custom table of contents since the default one isn't organized well
# enough for the number of classes we have: copy our toc.yml into the built
# docfx output right before it is uploaded.
#
# The upload command appears in both the search pattern and the replacement,
# so define it once to keep the two in sync.
_DOCFX_UPLOAD_CMD = (
    "python3.10 -m docuploader upload docs/_build/html/docfx_yaml "
    "--metadata-file docs.metadata --destination-prefix docfx "
    '--staging-bucket "${V2_STAGING_BUCKET}"'
)

count = s.replace(  # publish-docs.sh
    [".kokoro/publish-docs.sh"],
    re.escape("# upload docs") + "\n" + re.escape(_DOCFX_UPLOAD_CMD),
    (
        "# Replace toc.yml template file\n"
        "mv docs/templates/toc.yml docs/_build/html/docfx_yaml/toc.yml\n\n"
        "# upload docs\n" + _DOCFX_UPLOAD_CMD
    ),
)
assert count == 1
# Fixup the documentation: replace the generic client-library tagline with a
# description of what BigQuery DataFrames actually is.
count = s.replace(  # docs/conf.py
    ["docs/conf.py"],
    re.escape("Google Cloud Client Libraries for bigframes"),
    "BigQuery DataFrames provides DataFrame APIs on the BigQuery engine",
)
assert count == 1
# Don't omit `*/core/*.py` when counting test coverages: drop the templated
# omit line so core modules are included in the coverage report.
count = s.replace(  # .coveragerc
    [".coveragerc"],
    re.escape(" */core/*.py\n"),
    "",
)
assert count == 1
# ----------------------------------------------------------------------------
# Samples templates
# ----------------------------------------------------------------------------

python.py_samples(skip_readmes=True)

# ----------------------------------------------------------------------------
# Final cleanup
# ----------------------------------------------------------------------------

# Auto-format the repository root, then every samples project — each sample
# directory has its own noxfile with its own "format" session.
s.shell.run(["nox", "-s", "format"], hide_output=False)
for sample_noxfile in REPO_ROOT.glob("samples/**/noxfile.py"):
    s.shell.run(
        ["nox", "-s", "format"],
        cwd=sample_noxfile.parent,
        hide_output=False,
    )