#!/usr/bin/env python
#
# Copyright 2014 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
# Script used to build the RPM, Debian, and tarball packages for releasing Scalyr Agent 2.
#
# To execute this script, you must have installed fpm: https://github.com/jordansissel/fpm
#
# Usage: python build_package.py [options] rpm|tarball|deb
#
# author: Steven Czerwinski <czerwin@scalyr.com>
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
__author__ = "czerwin@scalyr.com"
import errno
import glob
import json
import os
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import time
import uuid
import getpass
from io import StringIO
from io import BytesIO
from io import open
from optparse import OptionParser
from time import gmtime, strftime
from scalyr_agent.__scalyr__ import get_install_root, SCALYR_VERSION, scalyr_init
scalyr_init()
import scalyr_agent.util as scalyr_util
# [start of 2->TODO]
# Check for suitability.
# Important: import six, like any other dependency from the "third_party" libraries, only after "__scalyr__.scalyr_init" has run.
import six
from six.moves import range
# [end of 2->TODO]
# The root of the Scalyr repository should just be the parent of this file.
__source_root__ = get_install_root()
# All the different packages that this script can build.
PACKAGE_TYPES = [
"rpm",
"tarball",
"deb",
"win32",
"docker_syslog_builder",
"docker_json_builder",
"docker_api_builder",
"k8s_builder",
]
def build_package(package_type, variant, no_versioned_file_name, coverage_enabled):
"""Builds the scalyr-agent-2 package specified by the arguments.
The package is left in the current working directory. The file name of the
package is returned by this function.
@param package_type: One of `PACKAGE_TYPES`. Determines which package type is built.
@param variant: Adds the specified string into the package's iteration name. This may be None if no additional
tweak to the name is required. This is used to produce different packages even for the same package type (such
as 'rpm').
@param no_versioned_file_name: If True, will not embed a version number in the resulting artifact's file name.
This only has an effect when building one of the tarball formats.
@param coverage_enabled: If True, enables coverage analysis. Patches Dockerfile to run agent with coverage.
@return: The file name of the produced package.
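Example (illustrative): `build_package("deb", variant=None, no_versioned_file_name=False, coverage_enabled=False)`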
"""
original_cwd = os.getcwd()
version = SCALYR_VERSION
# Create a temporary directory to build the package in.
tmp_dir = tempfile.mkdtemp(prefix="build-scalyr-agent-packages")
try:
# Change to that directory and delegate to another method for the specific type.
os.chdir(tmp_dir)
if package_type == "tarball":
artifact_file_name = build_tarball_package(
variant, version, no_versioned_file_name
)
elif package_type == "win32":
artifact_file_name = build_win32_installer_package(variant, version)
elif package_type == "docker_syslog_builder":
# An image for running on Docker configured to receive logs from other containers via syslog.
# This is the deprecated approach (but is still published under scalyr/scalyr-docker-agent for
# backward compatibility.) We also publish this under scalyr/scalyr-docker-agent-syslog to help
# with the eventual migration.
artifact_file_name = build_container_builder(
variant,
version,
no_versioned_file_name,
"scalyr-docker-agent.tar.gz",
"docker/Dockerfile.syslog",
"docker/docker-syslog-config",
"scalyr-docker-agent-syslog",
["scalyr/scalyr-agent-docker-syslog", "scalyr/scalyr-agent-docker"],
coverage_enabled=coverage_enabled,
)
elif package_type == "docker_json_builder":
# An image for running on Docker configured to fetch logs via the file system (the container log
# directory is mounted to the agent container.) This is the preferred way of running on Docker.
# This image is published to scalyr/scalyr-agent-docker-json.
artifact_file_name = build_container_builder(
variant,
version,
no_versioned_file_name,
"scalyr-docker-agent.tar.gz",
"docker/Dockerfile",
"docker/docker-json-config",
"scalyr-docker-agent-json",
["scalyr/scalyr-agent-docker-json"],
coverage_enabled=coverage_enabled,
)
elif package_type == "docker_api_builder":
# An image for running on Docker configured to fetch logs via the Docker API using
# docker_raw_logs: false configuration option.
artifact_file_name = build_container_builder(
variant,
version,
no_versioned_file_name,
"scalyr-docker-agent.tar.gz",
"docker/Dockerfile",
"docker/docker-api-config",
"scalyr-docker-agent-api",
["scalyr/scalyr-agent-docker-api"],
coverage_enabled=coverage_enabled,
)
elif package_type == "k8s_builder":
# An image for running the agent on Kubernetes.
artifact_file_name = build_container_builder(
variant,
version,
no_versioned_file_name,
"scalyr-k8s-agent.tar.gz",
"docker/Dockerfile.k8s",
"docker/k8s-config",
"scalyr-k8s-agent",
["scalyr/scalyr-k8s-agent"],
coverage_enabled=coverage_enabled,
)
else:
assert package_type in ("deb", "rpm")
artifact_file_name = build_rpm_or_deb_package(
package_type == "rpm", variant, version
)
os.chdir(original_cwd)
# Move the artifact (built package) to the original current working dir.
shutil.move(os.path.join(tmp_dir, artifact_file_name), artifact_file_name)
return artifact_file_name
finally:
# Be sure to delete the temporary directory.
os.chdir(original_cwd)
shutil.rmtree(tmp_dir)
# A GUID representing Scalyr products, used to generate a per-version guid for each version of the Windows
# Scalyr Agent. DO NOT MODIFY THIS VALUE, or already installed software on client machines cannot be upgraded.
_scalyr_guid_ = uuid.UUID("{0b52b8a0-22c7-4d50-92c1-8ea3b258984e}")
def build_win32_installer_package(variant, version):
"""Builds an MSI that will install the agent on a win32 machine in the current working directory.
Note, this can only be run on a Windows machine with the proper binaries and packages installed.
@param variant: If not None, will add the specified string to the GUID used to identify the installed
executables. This can be used to avoid customer builds of the agent from colliding with the Scalyr-built
ones.
@param version: The agent version.
@return: The file name of the built package.
"""
if os.getenv("WIX") is None:
print(
"Error, the WIX toolset does not appear to be installed.", file=sys.stderr
)
print(
"Please install it to build the Windows Scalyr Agent installer.",
file=sys.stderr,
)
print("See http://wixtoolset.org.", file=sys.stderr)
sys.exit(1)
try:
import psutil # NOQA
except ImportError:
# noinspection PyUnusedLocal
print(
"Error, the psutil Python module is not installed. This is required to build the",
file=sys.stderr,
)
print(
"Windows version of the Scalyr Agent. Please download and install it.",
file=sys.stderr,
)
print("See http://pythonhosted.org/psutil/", file=sys.stderr)
print(
'On many systems, executing "pip install psutil" will install the package.',
file=sys.stderr,
)
sys.exit(1)
make_directory("source_root")
make_directory("data_files")
agent_source_root = __source_root__
# Populate source_root
os.chdir("source_root")
shutil.copytree(make_path(agent_source_root, "scalyr_agent"), "scalyr_agent")
# We have to move __scalyr__.py up to the top of the source_root since, when running in the environment
# generated by PyInstaller, an 'import __scalyr__' will not look in the current directory; it will only look
# for that module at the top of the source_root. Essentially, the PYTHONPATH variable only has a single
# entry in it, and it does not have '.' in it. We leave a copy of __scalyr__.py in the original scalyr_agent
# directory because we need it there when we execute setup.py. For the same reason, we put a copy of VERSION.
shutil.copy(convert_path("scalyr_agent/__scalyr__.py"), "__scalyr__.py")
shutil.copy(make_path(agent_source_root, "VERSION"), "VERSION")
shutil.copy(
make_path(agent_source_root, "VERSION"),
convert_path("scalyr_agent/VERSION"),
)
shutil.copytree(make_path(agent_source_root, "monitors"), "monitors")
os.chdir("monitors")
recursively_delete_files_by_name("README.md")
os.chdir("..")
# Exclude certain files.
# TODO: Should probably use MANIFEST.in to do this, but don't know the Python-fu to do this yet.
#
# Don't include the tests directories. Also, don't include the .idea directory created by IDE.
recursively_delete_dirs_by_name(r"\.idea", "tests")
recursively_delete_files_by_name(
r".*\.pyc", r".*\.pyo", r".*\.pyd", r"all_tests\.py", r".*~"
)
# Move back up to the root directory and populate the data_files.
os.chdir("..")
os.chdir("data_files")
# Copy the version file. We copy it both to the root and the package root. The package copy is done down below.
# We name it VERSION.txt because PyInstaller on Python 2 expects a DLL named VERSION,
# and then fails with an error when it tries to load that invalid DLL.
shutil.copy(make_path(agent_source_root, "VERSION"), "VERSION.txt")
shutil.copy(make_path(agent_source_root, "LICENSE.txt"), "LICENSE.txt")
# Also add in install_info file
write_to_file(get_install_info("package"), "install_info.json")
# Copy the third party licenses
shutil.copytree(
make_path(agent_source_root, "scalyr_agent/third_party/licenses"), "licenses"
)
os.chdir("..")
# We need to place a 'setup.py' here so that when we execute py2exe it finds it.
shutil.copy(make_path(agent_source_root, "setup.py"), "setup.py")
shutil.copy(
make_path(agent_source_root, "DESCRIPTION.rst"),
convert_path("source_root/DESCRIPTION.rst"),
)
shutil.copy(
os.path.join(agent_source_root, "win32", "scalyr_agent.wxs"), "scalyr_agent.wxs"
)
agent_package_path = os.path.join(agent_source_root, "scalyr_agent")
add_data = {os.path.join("data_files", "install_info.json"): "scalyr_agent"}
# Add monitor modules as hidden imports, since they are not directly imported in the agent's code.
hidden_imports = [
"scalyr_agent.builtin_monitors.apache_monitor",
"scalyr_agent.builtin_monitors.graphite_monitor",
"scalyr_agent.builtin_monitors.mysql_monitor",
"scalyr_agent.builtin_monitors.nginx_monitor",
"scalyr_agent.builtin_monitors.shell_monitor",
"scalyr_agent.builtin_monitors.syslog_monitor",
"scalyr_agent.builtin_monitors.test_monitor",
"scalyr_agent.builtin_monitors.url_monitor",
"scalyr_agent.builtin_monitors.windows_event_log_monitor",
"scalyr_agent.builtin_monitors.windows_system_metrics",
"scalyr_agent.builtin_monitors.windows_process_metrics",
"scalyr_agent.builtin_monitors.openmetrics_monitor",
]
hidden_imports.extend(["win32timezone"])
# Add packages to frozen binary paths.
paths_to_include = [
os.path.join(agent_source_root, "scalyr_agent", "third_party"),
os.path.join(agent_source_root, "scalyr_agent", "third_party_python2"),
]
# Create --add-data options from previously added files.
add_data_options = []
for src, dest in add_data.items():
add_data_options.append("--add-data")
add_data_options.append("{}{}{}".format(src, os.path.pathsep, dest))
# Create --hidden-import options from previously created hidden imports list.
hidden_import_options = []
for h in hidden_imports:
hidden_import_options.append("--hidden-import")
hidden_import_options.append(str(h))
paths_options = []
for p in paths_to_include:
paths_options.extend(["--paths", p])
command = [
sys.executable,
"-m",
"PyInstaller",
os.path.join(agent_package_path, "agent_main.py"),
"--onefile",
"-n",
"scalyr-agent-2",
]
command.extend(add_data_options)
command.extend(hidden_import_options)
command.extend(paths_options)
command.extend(
[
"--exclude-module",
"asyncio",
"--exclude-module",
"FixTk",
"--exclude-module",
"tcl",
"--exclude-module",
"tk",
"--exclude-module",
"_tkinter",
"--exclude-module",
"tkinter",
"--exclude-module",
"Tkinter",
"--exclude-module",
"sqlite",
]
)
subprocess.check_call(command)
make_directory("Scalyr/certs")
make_directory("Scalyr/logs")
make_directory("Scalyr/data")
make_directory("Scalyr/config/agent.d")
make_directory("Scalyr/config/templates")
make_directory("Scalyr/bin")
# NOTE: We intentionally set this permission bit for the agent.d directory to make sure it's not
# readable by others.
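# (Octal 741 means rwx for the owner, read-only for the group, and execute/traverse-only for others.)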
os.chmod("Scalyr/config/agent.d", int("741", 8))
# Copy the config file.
agent_json_path = make_path(agent_source_root, "config/agent.json")
cat_files(
[agent_json_path],
"Scalyr/config/templates/agent_config.tmpl",
convert_newlines=True,
)
# NOTE: We intentionally set this permission bit for agent.json to make sure it's not
# readable by others.
os.chmod("Scalyr/config/templates/agent_config.tmpl", int("640", 8))
shutil.copy(make_path(agent_source_root, "VERSION"), "Scalyr/VERSION")
# Copy frozen binary.
shutil.copy(os.path.join("dist", "scalyr-agent-2.exe"), "Scalyr/bin")
# Also copy the same binary as the Windows service binary.
# Even though we use the same binary for everything, I couldn't figure out how to make Wix
# reuse the same file for multiple components (TODO: figure out how), but it seems that
# packager compression handles this well and does not increase package size.
shutil.copy(
os.path.join("dist", "scalyr-agent-2.exe"), "Scalyr/bin/ScalyrAgentService.exe"
)
shutil.copy(
os.path.join(agent_source_root, "win32/scalyr-agent-2-config.cmd"), "Scalyr/bin"
)
shutil.copy(
make_path(agent_source_root, "win32/ScalyrShell.cmd"),
"Scalyr/bin/ScalyrShell.cmd",
)
# Copy the cert files.
# AGENT-283: Certificate validation on windows seems to fail when the intermediate certs are present, skipping them
cat_files(
glob_files(make_path(agent_source_root, "certs/*_root.pem")),
"Scalyr/certs/ca_certs.crt",
convert_newlines=True,
)
# TODO: Check certificate expiration same as we do as part of tox lint target
# NOTE: This requires us to update Jenkins pipeline and other places where this script is called
# to install cryptography library
# Get ready to run wix. Add in WIX to the PATH variable.
os.environ["PATH"] = "%s;%s\\bin" % (os.getenv("PATH"), os.getenv("WIX"))
if variant is None:
variant = "main"
# Generate a unique identifier used to identify this version of the Scalyr Agent to windows.
product_code = create_scalyr_uuid3("ProductID:%s:%s" % (variant, version))
# The upgrade code identifies all families of versions that can be upgraded from one to the other. So, this
# should be a single number for all Scalyr produced ones.
upgrade_code = create_scalyr_uuid3("UpgradeCode:%s" % variant)
# For prereleases, we use weird version numbers like 4.0.4.pre5.1. That does not work for Windows, which
# requires X.X.X.X, so we convert if necessary.
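# e.g. '4.0.4.pre5.1' becomes '4.0.4.1' (the pre-release component at index 3 is dropped).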
if len(version.split(".")) == 5:
parts = version.split(".")
del parts[3]
version = ".".join(parts)
run_command(
'candle -nologo -out ScalyrAgent.wixobj -dVERSION="%s" -dUPGRADECODE="%s" '
'-dPRODUCTCODE="%s" scalyr_agent.wxs' % (version, upgrade_code, product_code),
exit_on_fail=True,
command_name="candle",
)
installer_name = "ScalyrAgentInstaller-%s.msi" % version
run_command(
"light -nologo -ext WixUtilExtension.dll -ext WixUIExtension -out %s ScalyrAgent.wixobj -v"
% installer_name,
exit_on_fail=True,
command_name="light",
)
return installer_name
def create_wxs_file(template_path, dist_path, destination_path):
"""Performs a rewrite of the Wix file to replace template-like poritions with information about the
binaries/files in `dist_path`.
This is required so that our Windows installer includes all of the DLLs, Python compiled files, etc that PyInstaller
produced. This list can change over time and is dependent on the build machine, so we cannot hard code this
list. It must be determined dynamically.
The file is rewritten by expanding the 'templates' found between the '<!-- EXPAND_FROM_BIN' markers. This will
make a copy of the included template, once for each file in the `dist_path`, replacing such variables as
$COMPONENT_ID, $COMPONENT_GUID, $FILE_ID, and $FILE_SOURCE with values calculated from the file's information.
You may also specify a list of files to exclude in `dist_path` from the template expansion. This is used for
well-known files that are already in the Wix file.
Here is an example:
<!-- EXPAND_FROM_BIN EXCLUDE:scalyr-agent-2.exe,scalyr-agent-2-config.exe,ScalyrAgentService.exe -->
<Component Id='$COMPONENT_ID' Guid='$COMPONENT_GUID' >
<File Id='$FILE_ID' DiskId='1' KeyPath='yes' Checksum='yes' Source='$FILE_SOURCE' />
</Component>
<!-- EXPAND_FROM_BIN -->
@param template_path: The file path storing the Wix file to copy/rewrite.
@param dist_path: The path to the directory containing the files that should be included in the template
expansion.
@param destination_path: The file path to write the result
@type template_path: str
@type dist_path: str
@type destination_path: str
"""
# First, calculate all of the per-file information for each file in the distribution directory.
dist_files = []
for dist_file_path in glob.glob("%s/*" % dist_path):
base_file = os.path.basename(dist_file_path)
file_id = base_file.replace(".", "_").replace("-", "_")
entry = {
"BASE": base_file,
"FILE_ID": file_id,
"COMPONENT_GUID": str(create_scalyr_uuid3("DistComp%s" % base_file)),
"COMPONENT_ID": "%s_comp" % file_id,
"FILE_SOURCE": dist_file_path,
}
dist_files.append(entry)
# For the sake of easier coding, we read all of the lines of the input file into an array.
f = open(template_path)
try:
template_lines = f.readlines()
finally:
f.close()
# Now go through, looking for the markers, and when we find them, do the replacement.
result = []
while len(template_lines) > 0:
if "<!-- EXPAND_FROM_BIN" in template_lines[0]:
result.extend(expand_template(template_lines, dist_files))
else:
line = template_lines[0]
del template_lines[0]
result.append(line)
# Write the resulting lines out.
f = open(destination_path, "w")
try:
for line in result:
f.write(line)
finally:
f.close()
def create_scalyr_uuid3(name):
"""
Create a UUID based on the Scalyr UUID namespace and a hash of `name`.
:param name: The name
@type name: six.text_type
:return: The UUID
:rtype: uuid.UUID
"""
return scalyr_util.create_uuid3(_scalyr_guid_, name)
def expand_template(input_lines, dist_files):
"""Reads the template starting at the first entry in `input_lines` and generates a copy of it for each
item in `dist_files` that is not excluded.
Used by `create_wxs_file`.
This consumes the lines from the `input_lines` list.
@param input_lines: The list of input lines from the file, with the first beginning a template expansion
(should have the <!-- EXPAND_FROM_BIN pragma in it).
@param dist_files: The list of file entries from the distribution directory. The template should be expanded
once for each entry (unless it was specifically excluded).
@type input_lines: [str]
@type dist_files: [{}]
@return: The list of lines produced by the expansion.
@rtype: [str]
"""
# First, see if there were any files that should be excluded. This will be in the first line, prefaced by
# EXCLUDE: and a comma-separated list.
match = re.search(r"EXCLUDE:(\S*)", input_lines[0])
del input_lines[0]
if match is not None:
excluded_files = match.group(1).split(",")
else:
excluded_files = []
# Create a list of just the template. We need to find where it ends in the input lines.
template_lines = []
found_end = False
while len(input_lines) > 0:
line = input_lines[0]
del input_lines[0]
if "<!-- EXPAND_FROM_BIN" in line:
found_end = True
break
else:
template_lines.append(line)
if not found_end:
raise Exception("Did not find termination for EXPAND_FROM_BIN")
result = []
# Do the expansion.
for dist_entry in dist_files:
if dist_entry["BASE"] in excluded_files:
continue
for template_line in template_lines:
line = template_line.replace("$FILE_ID", dist_entry["FILE_ID"])
line = line.replace("$COMPONENT_GUID", dist_entry["COMPONENT_GUID"])
line = line.replace("$COMPONENT_ID", dist_entry["COMPONENT_ID"])
line = line.replace("$FILE_SOURCE", dist_entry["FILE_SOURCE"])
result.append(line)
return result
def build_common_docker_and_package_files(create_initd_link, base_configs=None):
"""Builds the common `root` system used by Debian, RPM, and container source tarballs in the current working
directory.
@param create_initd_link: Whether or not to create the link from initd to the scalyr agent binary.
@param base_configs: The directory (relative to the top of the source tree) that contains the configuration
files to copy (such as the agent.json and agent.d directory). If None, then will use `config`.
@type create_initd_link: bool
@type base_configs: str
"""
original_dir = os.getcwd()
# Create the directory structure for where the RPM/Debian package will place files on the system.
make_directory("root/etc/init.d")
make_directory("root/var/log/scalyr-agent-2")
make_directory("root/var/lib/scalyr-agent-2")
make_directory("root/usr/share")
make_directory("root/usr/sbin")
# Place all of the important source files in /usr/share/scalyr-agent-2.
os.chdir("root/usr/share")
build_base_files(install_type="package", base_configs=base_configs)
os.chdir("scalyr-agent-2")
# The build_base_files leaves the config directory in config, but we have to move it to its etc
# location. We just rename it to the right directory.
shutil.move(
convert_path("config"), make_path(original_dir, "root/etc/scalyr-agent-2")
)
os.chdir(original_dir)
# Make sure there is an agent.d directory regardless of the config directory we used.
make_directory("root/etc/scalyr-agent-2/agent.d")
# NOTE: We intentionally set this permission bit for the agent.d directory to make sure it's not
# readable by others.
os.chmod("root/etc/scalyr-agent-2/agent.d", int("741", 8))
# Create the links to the appropriate commands in /usr/sbin and /etc/init.d/
if create_initd_link:
make_soft_link(
"/usr/share/scalyr-agent-2/bin/scalyr-agent-2",
"root/etc/init.d/scalyr-agent-2",
)
make_soft_link(
"/usr/share/scalyr-agent-2/bin/scalyr-agent-2", "root/usr/sbin/scalyr-agent-2"
)
make_soft_link(
"/usr/share/scalyr-agent-2/bin/scalyr-agent-2-config",
"root/usr/sbin/scalyr-agent-2-config",
)
make_soft_link(
"/usr/share/scalyr-agent-2/bin/scalyr-switch-python",
"root/usr/sbin/scalyr-switch-python",
)
def build_container_builder(
variant,
version,
no_versioned_file_name,
source_tarball,
dockerfile,
base_configs,
image_name,
image_repos,
coverage_enabled=False,
):
"""Builds an executable script in the current working directory that will build the container image for the various
Docker and Kubernetes targets. This script embeds all assets it needs in it so it can be a standalone artifact.
The script is based on `docker/scripts/container_builder_base.sh`. See that script for information on how it can
be used.
@param variant: If not None, will add the specified string into the final script name. This allows for different
scripts to be built for the same type and same version.
@param version: The agent version.
@param no_versioned_file_name: True if the version number should not be embedded in the script's file name.
@param source_tarball: The filename for the source tarball (including the `.tar.gz` extension) that will
be built and then embedded in the artifact. The contents of the Dockerfile will determine what this
name should be.
@param dockerfile: The file path for the Dockerfile to embed in the script, relative to the top of the
agent source directory.
@param base_configs: The file path for the configuration to use when building the container image, relative
to the top of the agent source directory. This allows for different `agent.json` and `agent.d` directories
to be used for Kubernetes, docker, etc.
@param image_name: The name for the image that is being built. Will be used for the artifact's name.
@param image_repos: A list of repositories that should be added as tags to the image once it is built.
Each repository will have two tags added -- one for the specific agent version and one for `latest`.
@param coverage_enabled: If True, patch the Dockerfile to run the agent with coverage enabled.
@return: The file name of the built artifact.
"""
build_container_tarball(source_tarball, base_configs=base_configs)
agent_source_root = __source_root__
# Make a copy of the right Dockerfile to embed in the script.
shutil.copy(make_path(agent_source_root, dockerfile), "Dockerfile")
# copy requirements file with dependencies for docker builds.
shutil.copy(
make_path(agent_source_root, os.path.join("docker", "requirements.txt")),
"requirements.txt",
)
if variant is None:
version_string = version
else:
version_string = "%s.%s" % (version, variant)
# Read the base builder script into memory
base_fp = open(
make_path(agent_source_root, "docker/scripts/container_builder_base.sh"), "r"
)
base_script = base_fp.read()
base_fp.close()
# The script has two lines defining environment variables (REPOSITORIES and TAGS) that we need to overwrite to
# set them to what we want. We'll just do some regex replace to do that.
base_script = re.sub(
r"\n.*OVERRIDE_REPOSITORIES.*\n",
'\nREPOSITORIES="%s"\n' % ",".join(image_repos),
base_script,
)
base_script = re.sub(
r"\n.*OVERRIDE_TAGS.*\n",
'\nTAGS="%s"\n' % "%s,latest" % version_string,
base_script,
)
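# For example, for the k8s builder this produces lines like: REPOSITORIES="scalyr/scalyr-k8s-agent" and TAGS="<version>,latest".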
if no_versioned_file_name:
output_name = image_name
else:
output_name = "%s-%s" % (image_name, version_string)
# Tar it up but hold the tarfile in memory. Note, if the source tarball really becomes massive, we might have to
# rethink this.
tar_out = BytesIO()
tar = tarfile.open("assets.tar.gz", "w|gz", tar_out)
# If coverage is enabled, patch the Dockerfile to install the coverage package with pip.
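# (Illustrative: a Dockerfile line such as 'RUN pip install -r requirements.txt' would become
# 'RUN pip install -r requirements.txt coverage==4.5.4', and the CMD is rewritten so the agent starts under 'coverage run'.)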
if coverage_enabled:
with open("Dockerfile", "r") as file:
data = file.read()
new_dockerfile_source = re.sub(r"(RUN\spip\s.*)", r"\1 coverage==4.5.4", data)
new_dockerfile_source = re.sub(
r"CMD .*\n",
'CMD ["coverage", "run", "--branch", "/usr/share/scalyr-agent-2/py/scalyr_agent/agent_main.py", '
'"--no-fork", "--no-change-user", "start"]',
new_dockerfile_source,
)
with open("Dockerfile", "w") as file:
file.write(new_dockerfile_source)
tar.add("Dockerfile")
tar.add("requirements.txt")
tar.add(source_tarball)
tar.close()
# Write one file that has the contents of the script followed by the contents of the tarfile.
builder_fp = open(output_name, "wb")
builder_fp.write(base_script.encode("utf-8"))
builder_fp.write(tar_out.getvalue())
builder_fp.close()
# Make the script executable.
st = os.stat(output_name)
os.chmod(output_name, st.st_mode | stat.S_IEXEC | stat.S_IXGRP)
return output_name
def build_container_tarball(tarball_name, base_configs=None):
"""Builds the scalyr-agent-2 tarball for either Docker or Kubernetes in the current working directory.
@param tarball_name: The name for the output tarball (including the `.tar.gz` extension)
@param base_configs: The directory (relative to the top of the source tree) that contains the configuration
files to copy (such as the agent.json and agent.d directory). If None, then will use `config`.
@type tarball_name: str
@type base_configs: str
@return: The file name of the built tarball.
"""
build_common_docker_and_package_files(False, base_configs=base_configs)
# Need to create some docker specific files
make_directory("root/var/log/scalyr-agent-2/containers")
# Tar it up.
tar = tarfile.open(tarball_name, "w:gz")
original_dir = os.getcwd()
os.chdir("root")
# Do a manual walk over the contents of root so that we can use `addfile` to add each entry to the tarfile, which allows
# us to reset the owner/group to root. This might not be that portable to Windows, but for now, Docker is mainly
# Posix.
for root, dirs, files in os.walk("."):
to_copy = []
for name in dirs:
to_copy.append(os.path.join(root, name))
for name in files:
to_copy.append(os.path.join(root, name))
for x in to_copy:
file_entry = tar.gettarinfo(x)
file_entry.uname = "root"
file_entry.gname = "root"
file_entry.uid = 0
file_entry.gid = 0
if file_entry.isreg():
fp = open(file_entry.name, "rb")
tar.addfile(file_entry, fp)
fp.close()
else:
tar.addfile(file_entry)
os.chdir(original_dir)
tar.close()
return tarball_name
def build_rpm_or_deb_package(is_rpm, variant, version):
"""Builds either an RPM or Debian package in the current working directory.
@param is_rpm: True if an RPM should be built. Otherwise a Debian package will be built.
@param variant: If not None, will add the specified string into the iteration identifier for the package. This
allows for different packages to be built for the same type and same version.
@param version: The agent version.
@return: The file name of the built package.
"""
build_common_docker_and_package_files(True)
# Create the scriptlets the RPM/Debian package invokes when uninstalling or upgrading.
create_scriptlets()
# Produce the change logs that we will embed in the package, based on the CHANGELOG.md in this directory.
create_change_logs()
if is_rpm:
package_type = "rpm"
else:
package_type = "deb"
# Only change the iteration label if we need to embed a variant.
if variant is not None:
iteration_arg = "--iteration 1.%s" % variant
else:
iteration_arg = ""
description = (
"Scalyr Agent 2 is the daemon process Scalyr customers run on their servers to collect metrics and "
"log files and transmit them to Scalyr."
)
# Workaround for our ancient builder VM so we can use the --deb-use-file-permissions flag and make
# sure it works correctly - we need to make sure the root user is the owner of the files which are
# packaged.
# NOTE: We only need this workaround for debian packages and not rpm ones.
username = getpass.getuser()
if username in ["rpmbuilder", "circleci"] and not is_rpm:
print("Using builder VM sudo workaround for file ownership issue")
use_sudo = True
sudo_command_string = "sudo "
else:
use_sudo = False
sudo_command_string = ""
if use_sudo:
run_command("sudo chown -R root:root .")
run_command(
'%s fpm -s dir -a all -t %s -n "scalyr-agent-2" -v %s '
' --license "Apache 2.0" '
" --vendor Scalyr %s "
" --maintainer czerwin@scalyr.com "
" --provides scalyr-agent-2 "
' --description "%s" '
' --depends "bash >= 3.2" '
" --url https://www.scalyr.com "
" --deb-user root "
" --deb-group root "
" --deb-changelog changelog-deb "
" --rpm-user root "
" --rpm-group root "
" --rpm-changelog changelog-rpm"
" --before-install preinstall.sh "
" --after-install postinstall.sh "
" --before-remove preuninstall.sh "
" --deb-no-default-config-files "
" --no-deb-auto-config-files "
" --config-files /etc/scalyr-agent-2/agent.json "
# NOTE: We leave those two files in place since they are symlinks which might have been
# updated by scalyr-switch-python, and we want to leave this in place - i.e. make sure the
# selected Python version is preserved on upgrade
" --config-files /usr/share/scalyr-agent-2/bin/scalyr-agent-2 "
# " --config-files /usr/share/scalyr-agent-2/bin/scalyr-agent-2-config "
" --directories /usr/share/scalyr-agent-2 "
" --directories /var/lib/scalyr-agent-2 "
" --directories /var/log/scalyr-agent-2 "
# NOTE 1: By default fpm won't preserve all the permissions we set on the files so we need
# to use those flags.
# If we don't do that, fpm will use 77X for directories and we don't really want 7 for
# "group" and it also means config file permissions won't be correct.
# NOTE 2: This is commented out since it breaks builds produced on builder VM where
# build_package.py runs as rpmbuilder user (uid 1001) and that uid is preserved as file
# owner for the package tarball file which breaks things.
# On Circle CI uid of the user under which the package job runs is 0 aka root so it works
# fine.
# We don't run fpm as root on builder VM which means we can't use any other workaround.
# Commenting this flag out means that original file permissions (+ownership) won't be
# preserved which means we will also rely on postinst step fixing permissions for fresh /
# new installations since those permissions won't be correct in the package artifact itself.
# Not great.
# Once we move all the build steps to Circle CI and ensure build_package.py runs as uid 0
# we should uncomment this.
# In theory it should work with the --*-user fpm flag, but it doesn't. Keep in mind that the
# issue only applies to deb packages since --rpm-user and --rpm-root flag override the user
# even if the --rpm-use-file-permissions flag is used.
# " --rpm-use-file-permissions "
" --rpm-use-file-permissions --deb-use-file-permissions "
# NOTE: Sadly we can't use defattrdir since it breaks permissions for some other
# directories such as /etc/init.d and we need to handle that in postinst :/
# " --rpm-auto-add-directories "
# " --rpm-defattrfile 640"
# " --rpm-defattrdir 751"
" --verbose"
" -C root usr etc var"
% (sudo_command_string, package_type, version, iteration_arg, description),
exit_on_fail=True,
command_name="fpm",
)
# We need to make sure that after we package everything up in deb, we restore the permissions
# so Jenkins can access the files
if use_sudo:
run_command("sudo chown -R %s:%s ." % (username, username))
# We determine the artifact name in a somewhat loose fashion: we just glob over the current
# directory looking for something either ending in .rpm or .deb. There should only be one package,
# so that is fine.
if is_rpm:
files = glob.glob("*.rpm")
else:
files = glob.glob("*.deb")
if len(files) != 1:
raise Exception(
"Could not find resulting rpm or debian package in the build directory."
)
return files[0]
def build_tarball_package(variant, version, no_versioned_file_name):
"""Builds the scalyr-agent-2 tarball in the current working directory.
@param variant: If not None, will add the specified string into the final tarball name. This allows for different
tarballs to be built for the same type and same version.
@param version: The agent version.
@param no_versioned_file_name: True if the version number should not be embedded in the artifact's file name.
@return: The file name of the built tarball.
"""
# Use build_base_files to build all of the important stuff in ./scalyr-agent-2
build_base_files(install_type="tar")
# Build the rest of the directories required for the tarball install. Mainly, the log and data directories
# in the tarball itself where the running process will store its state.
make_directory("scalyr-agent-2/data")
make_directory("scalyr-agent-2/log")
make_directory("scalyr-agent-2/config/agent.d")
# NOTE: We intentionally set this permission bit for the agent.d directory to make sure it's not
# readable by others.
os.chmod("scalyr-agent-2/config/agent.d", int("741", 8))
# Create a file named packageless. This signals to the agent that
# this is a tarball install instead of an RPM/Debian install, which changes
# the default paths for the config, logs, data, etc directories. See