Line 0
Link Here
|
|
|
1 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/SConstruct ./SConstruct |
2 |
--- /data/ports-build/mongodb-src-r4.0.21/SConstruct 2020-11-15 22:50:25.001942000 +0100 |
3 |
+++ ./SConstruct 2020-11-16 20:09:38.793885000 +0100 |
4 |
@@ -28,8 +28,8 @@ |
5 |
import mongo.toolchain as mongo_toolchain |
6 |
import mongo.generators as mongo_generators |
7 |
|
8 |
-EnsurePythonVersion(2, 7) |
9 |
-EnsureSConsVersion(2, 5) |
10 |
+EnsurePythonVersion(3, 5) |
11 |
+EnsureSConsVersion(3, 0, 4) |
12 |
|
13 |
from buildscripts import utils |
14 |
from buildscripts import moduleconfig |
15 |
@@ -435,7 +435,7 @@ |
16 |
} |
17 |
|
18 |
add_option('win-version-min', |
19 |
- choices=win_version_min_choices.keys(), |
20 |
+ choices=list(win_version_min_choices.keys()), |
21 |
default=None, |
22 |
help='minimum Windows version to support', |
23 |
type='choice', |
24 |
@@ -547,7 +547,7 @@ |
25 |
except IOError as e: |
26 |
# If the file error wasn't because the file is missing, error out |
27 |
if e.errno != errno.ENOENT: |
28 |
- print("Error opening version.json: {0}".format(e.strerror)) |
29 |
+ print(("Error opening version.json: {0}".format(e.strerror))) |
30 |
Exit(1) |
31 |
|
32 |
version_data = { |
33 |
@@ -556,14 +556,14 @@ |
34 |
} |
35 |
|
36 |
except ValueError as e: |
37 |
- print("Error decoding version.json: {0}".format(e)) |
38 |
+ print(("Error decoding version.json: {0}".format(e))) |
39 |
Exit(1) |
40 |
|
41 |
# Setup the command-line variables |
42 |
def variable_shlex_converter(val): |
43 |
# If the argument is something other than a string, propogate |
44 |
# it literally. |
45 |
- if not isinstance(val, basestring): |
46 |
+ if not isinstance(val, str): |
47 |
return val |
48 |
parse_mode = get_option('variable-parse-mode') |
49 |
if parse_mode == 'auto': |
50 |
@@ -627,7 +627,7 @@ |
51 |
|
52 |
variables_files = variable_shlex_converter(get_option('variables-files')) |
53 |
for file in variables_files: |
54 |
- print("Using variable customization file %s" % file) |
55 |
+ print(("Using variable customization file %s" % file)) |
56 |
|
57 |
env_vars = Variables( |
58 |
files=variables_files, |
59 |
@@ -636,7 +636,7 @@ |
60 |
|
61 |
sconsflags = os.environ.get('SCONSFLAGS', None) |
62 |
if sconsflags: |
63 |
- print("Using SCONSFLAGS environment variable arguments: %s" % sconsflags) |
64 |
+ print(("Using SCONSFLAGS environment variable arguments: %s" % sconsflags)) |
65 |
|
66 |
env_vars.Add('ABIDW', |
67 |
help="Configures the path to the 'abidw' (a libabigail) utility") |
68 |
@@ -770,7 +770,7 @@ |
69 |
def validate_mongo_version(key, val, env): |
70 |
regex = r'^(\d+)\.(\d+)\.(\d+)-?((?:(rc)(\d+))?.*)?' |
71 |
if not re.match(regex, val): |
72 |
- print("Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format(val)) |
73 |
+ print(("Invalid MONGO_VERSION '{}', or could not derive from version.json or git metadata. Please add a conforming MONGO_VERSION=x.y.z[-extra] as an argument to SCons".format(val))) |
74 |
Exit(1) |
75 |
|
76 |
env_vars.Add('MONGO_VERSION', |
77 |
@@ -901,12 +901,12 @@ |
78 |
Exit(1) |
79 |
|
80 |
sconsDataDir = Dir(buildDir).Dir('scons') |
81 |
-SConsignFile(str(sconsDataDir.File('sconsign'))) |
82 |
+SConsignFile(str(sconsDataDir.File('sconsign.py3'))) |
83 |
|
84 |
def printLocalInfo(): |
85 |
import sys, SCons |
86 |
- print( "scons version: " + SCons.__version__ ) |
87 |
- print( "python version: " + " ".join( [ `i` for i in sys.version_info ] ) ) |
88 |
+ print(( "scons version: " + SCons.__version__ )) |
89 |
+ print(( "python version: " + " ".join( [ repr(i) for i in sys.version_info ] ) )) |
90 |
|
91 |
printLocalInfo() |
92 |
|
93 |
@@ -986,12 +986,12 @@ |
94 |
env.AddMethod(mongo_platform.env_get_os_name_wrapper, 'GetTargetOSName') |
95 |
|
96 |
def fatal_error(env, msg, *args): |
97 |
- print(msg.format(*args)) |
98 |
+ print((msg.format(*args))) |
99 |
Exit(1) |
100 |
|
101 |
def conf_error(env, msg, *args): |
102 |
- print(msg.format(*args)) |
103 |
- print("See {0} for details".format(env.File('$CONFIGURELOG').abspath)) |
104 |
+ print((msg.format(*args))) |
105 |
+ print(("See {0} for details".format(env.File('$CONFIGURELOG').abspath))) |
106 |
Exit(1) |
107 |
|
108 |
env.AddMethod(fatal_error, 'FatalError') |
109 |
@@ -1010,12 +1010,12 @@ |
110 |
env.AddMethod(lambda env: env['VERBOSE'], 'Verbose') |
111 |
|
112 |
if has_option('variables-help'): |
113 |
- print(env_vars.GenerateHelpText(env)) |
114 |
+ print((env_vars.GenerateHelpText(env))) |
115 |
Exit(0) |
116 |
|
117 |
#unknown_vars = env_vars.UnknownVariables() |
118 |
#if unknown_vars: |
119 |
-# env.FatalError("Unknown variables specified: {0}", ", ".join(unknown_vars.keys())) |
120 |
+# env.FatalError("Unknown variables specified: {0}", ", ".join(list(unknown_vars.keys()))) |
121 |
|
122 |
def set_config_header_define(env, varname, varval = 1): |
123 |
env['CONFIG_HEADER_DEFINES'][varname] = varval |
124 |
@@ -1100,7 +1100,7 @@ |
125 |
context.Result(ret) |
126 |
return ret; |
127 |
|
128 |
- for k in processor_macros.keys(): |
129 |
+ for k in list(processor_macros.keys()): |
130 |
ret = run_compile_check(k) |
131 |
if ret: |
132 |
context.Result('Detected a %s processor' % k) |
133 |
@@ -1222,7 +1222,7 @@ |
134 |
env['TARGET_ARCH'] = detected_processor |
135 |
|
136 |
if env['TARGET_OS'] not in os_macros: |
137 |
- print("No special config for [{0}] which probably means it won't work".format(env['TARGET_OS'])) |
138 |
+ print(("No special config for [{0}] which probably means it won't work".format(env['TARGET_OS']))) |
139 |
elif not detectConf.CheckForOS(env['TARGET_OS']): |
140 |
env.ConfError("TARGET_OS ({0}) is not supported by compiler", env['TARGET_OS']) |
141 |
|
142 |
@@ -2081,7 +2081,7 @@ |
143 |
# form -Wno-xxx (but not -Wno-error=xxx), we also add -Wxxx to the flags. GCC does |
144 |
# warn on unknown -Wxxx style flags, so this lets us probe for availablity of |
145 |
# -Wno-xxx. |
146 |
- for kw in test_mutation.keys(): |
147 |
+ for kw in list(test_mutation.keys()): |
148 |
test_flags = test_mutation[kw] |
149 |
for test_flag in test_flags: |
150 |
if test_flag.startswith("-Wno-") and not test_flag.startswith("-Wno-error="): |
151 |
@@ -2095,7 +2095,7 @@ |
152 |
# to make them real errors. |
153 |
cloned.Append(CCFLAGS=['-Werror']) |
154 |
conf = Configure(cloned, help=False, custom_tests = { |
155 |
- 'CheckFlag' : lambda(ctx) : CheckFlagTest(ctx, tool, extension, flag) |
156 |
+ 'CheckFlag' : lambda ctx : CheckFlagTest(ctx, tool, extension, flag) |
157 |
}) |
158 |
available = conf.CheckFlag() |
159 |
conf.Finish() |
160 |
@@ -2611,7 +2611,7 @@ |
161 |
llvm_symbolizer = get_option('llvm-symbolizer') |
162 |
if os.path.isabs(llvm_symbolizer): |
163 |
if not myenv.File(llvm_symbolizer).exists(): |
164 |
- print("WARNING: Specified symbolizer '%s' not found" % llvm_symbolizer) |
165 |
+ print(("WARNING: Specified symbolizer '%s' not found" % llvm_symbolizer)) |
166 |
llvm_symbolizer = None |
167 |
else: |
168 |
llvm_symbolizer = myenv.WhereIs(llvm_symbolizer) |
169 |
@@ -2922,7 +2922,7 @@ |
170 |
# TODO: If we could programmatically extract the paths from the info output |
171 |
# we could give a better message here, but brew info's machine readable output |
172 |
# doesn't seem to include the whole 'caveats' section. |
173 |
- message = subprocess.check_output([brew, "info", "openssl"]) |
174 |
+ message = subprocess.check_output([brew, "info", "openssl"]).decode('utf-8') |
175 |
advice = textwrap.dedent( |
176 |
"""\ |
177 |
NOTE: HomeBrew installed to {0} appears to have OpenSSL installed. |
178 |
@@ -3082,7 +3082,7 @@ |
179 |
files = ['ssleay32.dll', 'libeay32.dll'] |
180 |
for extra_file in files: |
181 |
if not addOpenSslLibraryToDistArchive(extra_file): |
182 |
- print("WARNING: Cannot find SSL library '%s'" % extra_file) |
183 |
+ print(("WARNING: Cannot find SSL library '%s'" % extra_file)) |
184 |
|
185 |
|
186 |
|
187 |
@@ -3423,7 +3423,7 @@ |
188 |
|
189 |
outputIndex = next((idx for idx in [0,1] if conf.CheckAltivecVbpermqOutput(idx)), None) |
190 |
if outputIndex is not None: |
191 |
- conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex) |
192 |
+ conf.env.SetConfigHeaderDefine("MONGO_CONFIG_ALTIVEC_VEC_VBPERMQ_OUTPUT_INDEX", outputIndex) |
193 |
else: |
194 |
myenv.ConfError("Running on ppc64le, but can't find a correct vec_vbpermq output index. Compiler or platform not supported") |
195 |
|
196 |
@@ -3523,9 +3523,12 @@ |
197 |
import buildscripts.pylinters |
198 |
buildscripts.pylinters.lint_all(None, {}, []) |
199 |
|
200 |
- import buildscripts.lint |
201 |
- if not buildscripts.lint.run_lint( [ "src/mongo/" ] ): |
202 |
- raise Exception( "lint errors" ) |
203 |
+ env.Command( |
204 |
+ target="#run_lint", |
205 |
+ source=["buildscripts/lint.py", "src/mongo"], |
206 |
+ action="$PYTHON $SOURCES[0] $SOURCES[1]", |
207 |
+ ) |
208 |
+ |
209 |
|
210 |
env.Alias( "lint" , [] , [ doLint ] ) |
211 |
env.AlwaysBuild( "lint" ) |
212 |
Only in .: build |
213 |
Only in ./buildscripts: __pycache__ |
214 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/aggregate_tracefiles.py ./buildscripts/aggregate_tracefiles.py |
215 |
--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/aggregate_tracefiles.py 2020-10-16 18:02:58.000000000 +0200 |
216 |
+++ ./buildscripts/aggregate_tracefiles.py 2020-11-16 20:09:38.798317000 +0100 |
217 |
@@ -20,7 +20,7 @@ |
218 |
|
219 |
args += ['-o', output] |
220 |
|
221 |
- print ' '.join(args) |
222 |
+ print(' '.join(args)) |
223 |
|
224 |
return subprocess.call(args) |
225 |
|
226 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/aws_ec2.py ./buildscripts/aws_ec2.py |
227 |
--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/aws_ec2.py 2020-10-16 18:02:58.000000000 +0200 |
228 |
+++ ./buildscripts/aws_ec2.py 2020-11-16 20:09:38.801388000 +0100 |
229 |
@@ -1,8 +1,8 @@ |
230 |
#!/usr/bin/env python |
231 |
"""AWS EC2 instance launcher and controller.""" |
232 |
|
233 |
-from __future__ import print_function |
234 |
|
235 |
+ |
236 |
import base64 |
237 |
import collections |
238 |
import datetime |
239 |
@@ -88,12 +88,13 @@ |
240 |
if reached_state: |
241 |
print(" Instance {}!".format(instance.state["Name"]), file=sys.stdout) |
242 |
else: |
243 |
- print(" Instance in state '{}', failed to reach state '{}'{}!".format( |
244 |
- instance.state["Name"], state, client_error), file=sys.stdout) |
245 |
+ print( |
246 |
+ " Instance in state '{}', failed to reach state '{}'{}!".format( |
247 |
+ instance.state["Name"], state, client_error), file=sys.stdout) |
248 |
sys.stdout.flush() |
249 |
return 0 if reached_state else 1 |
250 |
|
251 |
- def control_instance( #pylint: disable=too-many-arguments,too-many-branches |
252 |
+ def control_instance( #pylint: disable=too-many-arguments,too-many-branches,too-many-locals |
253 |
self, mode, image_id, wait_time_secs=0, show_progress=False, console_output_file=None, |
254 |
console_screenshot_file=None): |
255 |
"""Control an AMI instance. Returns 0 & status information, if successful.""" |
256 |
@@ -296,14 +297,15 @@ |
257 |
status_options.add_option("--yamlFile", dest="yaml_file", default=None, |
258 |
help="Save the status into the specified YAML file.") |
259 |
|
260 |
- status_options.add_option("--consoleOutputFile", dest="console_output_file", default=None, |
261 |
- help="Save the console output into the specified file, if" |
262 |
- " available.") |
263 |
+ status_options.add_option( |
264 |
+ "--consoleOutputFile", dest="console_output_file", default=None, |
265 |
+ help="Save the console output into the specified file, if" |
266 |
+ " available.") |
267 |
|
268 |
- status_options.add_option("--consoleScreenshotFile", dest="console_screenshot_file", |
269 |
- default=None, |
270 |
- help="Save the console screenshot (JPG format) into the specified" |
271 |
- " file, if available.") |
272 |
+ status_options.add_option( |
273 |
+ "--consoleScreenshotFile", dest="console_screenshot_file", default=None, |
274 |
+ help="Save the console screenshot (JPG format) into the specified" |
275 |
+ " file, if available.") |
276 |
|
277 |
parser.add_option_group(control_options) |
278 |
parser.add_option_group(create_options) |
279 |
@@ -328,7 +330,6 @@ |
280 |
parser.error("Block size must be an integer") |
281 |
block_devices[device_name] = device_size |
282 |
|
283 |
- # The 'expire-on' key is a UTC time. |
284 |
expire_dt = datetime.datetime.utcnow() + datetime.timedelta(hours=options.tag_expire_hours) |
285 |
tags = [{"Key": "expire-on", "Value": expire_dt.strftime("%Y-%m-%d %H:%M:%S")}, |
286 |
{"Key": "Name", |
287 |
Only in ./buildscripts/idl/idl: __pycache__ |
288 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/compiler.py ./buildscripts/idl/idl/compiler.py |
289 |
--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/compiler.py 2020-10-16 18:02:58.000000000 +0200 |
290 |
+++ ./buildscripts/idl/idl/compiler.py 2020-11-16 20:09:38.804248000 +0100 |
291 |
@@ -31,8 +31,6 @@ |
292 |
Orchestrates the 3 passes (parser, binder, and generator) together. |
293 |
""" |
294 |
|
295 |
-from __future__ import absolute_import, print_function, unicode_literals |
296 |
- |
297 |
import io |
298 |
import logging |
299 |
import os |
300 |
@@ -70,14 +68,14 @@ |
301 |
"""Class for the IDL compiler to resolve imported files.""" |
302 |
|
303 |
def __init__(self, import_directories): |
304 |
- # type: (List[unicode]) -> None |
305 |
+ # type: (List[str]) -> None |
306 |
"""Construct a ImportResolver.""" |
307 |
self._import_directories = import_directories |
308 |
|
309 |
super(CompilerImportResolver, self).__init__() |
310 |
|
311 |
def resolve(self, base_file, imported_file_name): |
312 |
- # type: (unicode, unicode) -> unicode |
313 |
+ # type: (str, str) -> str |
314 |
"""Return the complete path to an imported file name.""" |
315 |
|
316 |
logging.debug("Resolving imported file '%s' for file '%s'", imported_file_name, base_file) |
317 |
@@ -108,7 +106,7 @@ |
318 |
raise errors.IDLError(msg) |
319 |
|
320 |
def open(self, resolved_file_name): |
321 |
- # type: (unicode) -> Any |
322 |
+ # type: (str) -> Any |
323 |
"""Return an io.Stream for the requested file.""" |
324 |
return io.open(resolved_file_name, encoding='utf-8') |
325 |
|
326 |
@@ -125,7 +123,7 @@ |
327 |
|
328 |
|
329 |
def _update_import_includes(args, spec, header_file_name): |
330 |
- # type: (CompilerArgs, syntax.IDLSpec, unicode) -> None |
331 |
+ # type: (CompilerArgs, syntax.IDLSpec, str) -> None |
332 |
"""Update the list of imports with a list of include files for each import with structs.""" |
333 |
# This function is fragile: |
334 |
# In order to try to generate headers with an "include what you use" set of headers, the IDL |
335 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/syntax.py ./buildscripts/idl/idl/syntax.py |
336 |
--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idl/syntax.py 2020-10-16 18:02:58.000000000 +0200 |
337 |
+++ ./buildscripts/idl/idl/syntax.py 2020-11-16 20:09:38.806128000 +0100 |
338 |
@@ -33,8 +33,6 @@ |
339 |
it follows the rules of the IDL, etc. |
340 |
""" |
341 |
|
342 |
-from __future__ import absolute_import, print_function, unicode_literals |
343 |
- |
344 |
import itertools |
345 |
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union |
346 |
|
347 |
@@ -70,7 +68,7 @@ |
348 |
|
349 |
|
350 |
def parse_array_type(name): |
351 |
- # type: (unicode) -> unicode |
352 |
+ # type: (str) -> str |
353 |
"""Parse a type name of the form 'array<type>' and extract type.""" |
354 |
if not name.startswith("array<") and not name.endswith(">"): |
355 |
return None |
356 |
@@ -95,7 +93,7 @@ |
357 |
# type: (Dict[Any, List[Any]]) -> Iterator[Tuple[Any, Any]] |
358 |
"""Return an Iterator of (key, value) pairs from a dictionary.""" |
359 |
return itertools.chain.from_iterable( |
360 |
- (_zip_scalar(value, key) for (key, value) in dic.viewitems())) |
361 |
+ (_zip_scalar(value, key) for (key, value) in dic.items())) |
362 |
|
363 |
|
364 |
class SymbolTable(object): |
365 |
@@ -115,7 +113,7 @@ |
366 |
self.types = [] # type: List[Type] |
367 |
|
368 |
def _is_duplicate(self, ctxt, location, name, duplicate_class_name): |
369 |
- # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> bool |
370 |
+ # type: (errors.ParserContext, common.SourceLocation, str, str) -> bool |
371 |
"""Return true if the given item already exist in the symbol table.""" |
372 |
for (item, entity_type) in _item_and_type({ |
373 |
"command": self.commands, |
374 |
@@ -179,12 +177,12 @@ |
375 |
self.add_type(ctxt, idltype) |
376 |
|
377 |
def resolve_field_type(self, ctxt, location, field_name, type_name): |
378 |
- # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> Optional[Union[Command, Enum, Struct, Type]] |
379 |
+ # type: (errors.ParserContext, common.SourceLocation, str, str) -> Optional[Union[Command, Enum, Struct, Type]] |
380 |
"""Find the type or struct a field refers to or log an error.""" |
381 |
return self._resolve_field_type(ctxt, location, field_name, type_name) |
382 |
|
383 |
def _resolve_field_type(self, ctxt, location, field_name, type_name): |
384 |
- # type: (errors.ParserContext, common.SourceLocation, unicode, unicode) -> Optional[Union[Command, Enum, Struct, Type]] |
385 |
+ # type: (errors.ParserContext, common.SourceLocation, str, str) -> Optional[Union[Command, Enum, Struct, Type]] |
386 |
"""Find the type or struct a field refers to or log an error.""" |
387 |
# pylint: disable=too-many-return-statements |
388 |
|
389 |
@@ -237,15 +235,15 @@ |
390 |
"""IDL imports object.""" |
391 |
|
392 |
def __init__(self, file_name, line, column): |
393 |
- # type: (unicode, int, int) -> None |
394 |
+ # type: (str, int, int) -> None |
395 |
"""Construct an Imports section.""" |
396 |
- self.imports = [] # type: List[unicode] |
397 |
+ self.imports = [] # type: List[str] |
398 |
|
399 |
# These are not part of the IDL syntax but are produced by the parser. |
400 |
# List of imports with structs. |
401 |
- self.resolved_imports = [] # type: List[unicode] |
402 |
+ self.resolved_imports = [] # type: List[str] |
403 |
# All imports directly or indirectly included |
404 |
- self.dependencies = [] # type: List[unicode] |
405 |
+ self.dependencies = [] # type: List[str] |
406 |
|
407 |
super(Import, self).__init__(file_name, line, column) |
408 |
|
409 |
@@ -262,16 +260,16 @@ |
410 |
# pylint: disable=too-many-instance-attributes |
411 |
|
412 |
def __init__(self, file_name, line, column): |
413 |
- # type: (unicode, int, int) -> None |
414 |
+ # type: (str, int, int) -> None |
415 |
"""Construct a Type.""" |
416 |
- self.name = None # type: unicode |
417 |
- self.description = None # type: unicode |
418 |
- self.cpp_type = None # type: unicode |
419 |
- self.bson_serialization_type = None # type: List[unicode] |
420 |
- self.bindata_subtype = None # type: unicode |
421 |
- self.serializer = None # type: unicode |
422 |
- self.deserializer = None # type: unicode |
423 |
- self.default = None # type: unicode |
424 |
+ self.name = None # type: str |
425 |
+ self.description = None # type: str |
426 |
+ self.cpp_type = None # type: str |
427 |
+ self.bson_serialization_type = None # type: List[str] |
428 |
+ self.bindata_subtype = None # type: str |
429 |
+ self.serializer = None # type: str |
430 |
+ self.deserializer = None # type: str |
431 |
+ self.default = None # type: str |
432 |
|
433 |
super(Type, self).__init__(file_name, line, column) |
434 |
|
435 |
@@ -288,15 +286,15 @@ |
436 |
# pylint: disable=too-many-instance-attributes |
437 |
|
438 |
def __init__(self, file_name, line, column): |
439 |
- # type: (unicode, int, int) -> None |
440 |
+ # type: (str, int, int) -> None |
441 |
"""Construct a Field.""" |
442 |
- self.name = None # type: unicode |
443 |
- self.cpp_name = None # type: unicode |
444 |
- self.description = None # type: unicode |
445 |
- self.type = None # type: unicode |
446 |
+ self.name = None # type: str |
447 |
+ self.cpp_name = None # type: str |
448 |
+ self.description = None # type: str |
449 |
+ self.type = None # type: str |
450 |
self.ignore = False # type: bool |
451 |
self.optional = False # type: bool |
452 |
- self.default = None # type: unicode |
453 |
+ self.default = None # type: str |
454 |
self.supports_doc_sequence = False # type: bool |
455 |
self.comparison_order = -1 # type: int |
456 |
self.non_const_getter = False # type: bool |
457 |
@@ -316,10 +314,10 @@ |
458 |
""" |
459 |
|
460 |
def __init__(self, file_name, line, column): |
461 |
- # type: (unicode, int, int) -> None |
462 |
+ # type: (str, int, int) -> None |
463 |
"""Construct a Type.""" |
464 |
- self.name = None # type: unicode |
465 |
- self.cpp_name = None # type: unicode |
466 |
+ self.name = None # type: str |
467 |
+ self.cpp_name = None # type: str |
468 |
|
469 |
super(ChainedStruct, self).__init__(file_name, line, column) |
470 |
|
471 |
@@ -332,10 +330,10 @@ |
472 |
""" |
473 |
|
474 |
def __init__(self, file_name, line, column): |
475 |
- # type: (unicode, int, int) -> None |
476 |
+ # type: (str, int, int) -> None |
477 |
"""Construct a Type.""" |
478 |
- self.name = None # type: unicode |
479 |
- self.cpp_name = None # type: unicode |
480 |
+ self.name = None # type: str |
481 |
+ self.cpp_name = None # type: str |
482 |
|
483 |
super(ChainedType, self).__init__(file_name, line, column) |
484 |
|
485 |
@@ -350,10 +348,10 @@ |
486 |
# pylint: disable=too-many-instance-attributes |
487 |
|
488 |
def __init__(self, file_name, line, column): |
489 |
- # type: (unicode, int, int) -> None |
490 |
+ # type: (str, int, int) -> None |
491 |
"""Construct a Struct.""" |
492 |
- self.name = None # type: unicode |
493 |
- self.description = None # type: unicode |
494 |
+ self.name = None # type: str |
495 |
+ self.description = None # type: str |
496 |
self.strict = True # type: bool |
497 |
self.immutable = False # type: bool |
498 |
self.inline_chained_structs = True # type: bool |
499 |
@@ -399,10 +397,10 @@ |
500 |
""" |
501 |
|
502 |
def __init__(self, file_name, line, column): |
503 |
- # type: (unicode, int, int) -> None |
504 |
+ # type: (str, int, int) -> None |
505 |
"""Construct an Enum.""" |
506 |
- self.name = None # type: unicode |
507 |
- self.value = None # type: unicode |
508 |
+ self.name = None # type: str |
509 |
+ self.value = None # type: str |
510 |
|
511 |
super(EnumValue, self).__init__(file_name, line, column) |
512 |
|
513 |
@@ -415,11 +413,11 @@ |
514 |
""" |
515 |
|
516 |
def __init__(self, file_name, line, column): |
517 |
- # type: (unicode, int, int) -> None |
518 |
+ # type: (str, int, int) -> None |
519 |
"""Construct an Enum.""" |
520 |
- self.name = None # type: unicode |
521 |
- self.description = None # type: unicode |
522 |
- self.type = None # type: unicode |
523 |
+ self.name = None # type: str |
524 |
+ self.description = None # type: str |
525 |
+ self.type = None # type: str |
526 |
self.values = None # type: List[EnumValue] |
527 |
|
528 |
# Internal property that is not represented as syntax. An imported enum is read from an |
529 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idlc.py ./buildscripts/idl/idlc.py |
530 |
--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/idl/idlc.py 2020-10-16 18:02:58.000000000 +0200 |
531 |
+++ ./buildscripts/idl/idlc.py 2020-11-16 20:09:38.807705000 +0100 |
532 |
@@ -29,8 +29,6 @@ |
533 |
# |
534 |
"""IDL Compiler Driver Main Entry point.""" |
535 |
|
536 |
-from __future__ import absolute_import, print_function |
537 |
- |
538 |
import argparse |
539 |
import logging |
540 |
import sys |
541 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/buildscripts/utils.py ./buildscripts/utils.py |
542 |
--- /data/ports-build/mongodb-src-r4.0.21/buildscripts/utils.py 2020-10-16 18:02:58.000000000 +0200 |
543 |
+++ ./buildscripts/utils.py 2020-11-16 20:09:38.809660000 +0100 |
544 |
@@ -99,7 +99,7 @@ |
545 |
with open(os.devnull, "r+") as devnull: |
546 |
proc = subprocess.Popen("git describe --abbrev=7", stdout=subprocess.PIPE, stderr=devnull, |
547 |
stdin=devnull, shell=True) |
548 |
- return proc.communicate()[0].strip() |
549 |
+ return proc.communicate()[0].strip().decode('utf-8') |
550 |
|
551 |
|
552 |
def execsys(args): |
553 |
Only in ./site_scons: __pycache__ |
554 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/libdeps.py ./site_scons/libdeps.py |
555 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/libdeps.py 2020-10-16 18:02:58.000000000 +0200 |
556 |
+++ ./site_scons/libdeps.py 2020-11-16 20:09:38.813403000 +0100 |
557 |
@@ -61,7 +61,7 @@ |
558 |
missing_syslibdep = 'MISSING_LIBDEP_' |
559 |
|
560 |
class dependency(object): |
561 |
- Public, Private, Interface = range(3) |
562 |
+ Public, Private, Interface = list(range(3)) |
563 |
|
564 |
def __init__(self, value, deptype): |
565 |
self.target_node = value |
566 |
@@ -85,7 +85,7 @@ |
567 |
class DependencyCycleError(SCons.Errors.UserError): |
568 |
"""Exception representing a cycle discovered in library dependencies.""" |
569 |
|
570 |
- def __init__(self, first_node ): |
571 |
+ def __init__(self, first_node): |
572 |
super(DependencyCycleError, self).__init__() |
573 |
self.cycle_nodes = [first_node] |
574 |
|
575 |
@@ -100,8 +100,8 @@ |
576 |
setattr(node.attributes, "libdeps_direct_sorted", direct_sorted) |
577 |
return direct_sorted |
578 |
|
579 |
-def __get_libdeps(node): |
580 |
|
581 |
+def __get_libdeps(node): |
582 |
"""Given a SCons Node, return its library dependencies, topologically sorted. |
583 |
|
584 |
Computes the dependencies if they're not already cached. |
585 |
@@ -133,7 +133,7 @@ |
586 |
marked.add(n.target_node) |
587 |
tsorted.append(n.target_node) |
588 |
|
589 |
- except DependencyCycleError, e: |
590 |
+ except DependencyCycleError as e: |
591 |
if len(e.cycle_nodes) == 1 or e.cycle_nodes[0] != e.cycle_nodes[-1]: |
592 |
e.cycle_nodes.insert(0, n.target_node) |
593 |
raise |
594 |
@@ -150,6 +150,7 @@ |
595 |
|
596 |
return tsorted |
597 |
|
598 |
+ |
599 |
def __get_syslibdeps(node): |
600 |
""" Given a SCons Node, return its system library dependencies. |
601 |
|
602 |
@@ -161,11 +162,11 @@ |
603 |
for lib in __get_libdeps(node): |
604 |
for syslib in node.get_env().Flatten(lib.get_env().get(syslibdeps_env_var, [])): |
605 |
if syslib: |
606 |
- if type(syslib) in (str, unicode) and syslib.startswith(missing_syslibdep): |
607 |
- print("Target '%s' depends on the availability of a " |
608 |
+ if type(syslib) is str and syslib.startswith(missing_syslibdep): |
609 |
+ print(("Target '%s' depends on the availability of a " |
610 |
"system provided library for '%s', " |
611 |
"but no suitable library was found during configuration." % |
612 |
- (str(node), syslib[len(missing_syslibdep):])) |
613 |
+ (str(node), syslib[len(missing_syslibdep):]))) |
614 |
node.get_env().Exit(1) |
615 |
syslibdeps.append(syslib) |
616 |
setattr(node.attributes, cached_var_name, syslibdeps) |
617 |
@@ -181,18 +182,21 @@ |
618 |
|
619 |
if old_scanner: |
620 |
path_function = old_scanner.path_function |
621 |
+ |
622 |
def new_scanner(node, env, path=()): |
623 |
result = old_scanner.function(node, env, path) |
624 |
result.extend(__get_libdeps(node)) |
625 |
return result |
626 |
else: |
627 |
path_function = None |
628 |
+ |
629 |
def new_scanner(node, env, path=()): |
630 |
return __get_libdeps(node) |
631 |
|
632 |
builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner, |
633 |
- path_function=path_function) |
634 |
+ path_function=path_function) |
635 |
|
636 |
+ |
637 |
def get_libdeps(source, target, env, for_signature): |
638 |
"""Implementation of the special _LIBDEPS environment variable. |
639 |
|
640 |
@@ -202,6 +206,7 @@ |
641 |
target = env.Flatten([target]) |
642 |
return __get_libdeps(target[0]) |
643 |
|
644 |
+ |
645 |
def get_libdeps_objs(source, target, env, for_signature): |
646 |
objs = [] |
647 |
for lib in get_libdeps(source, target, env, for_signature): |
648 |
@@ -209,6 +214,7 @@ |
649 |
objs.extend(lib.sources) |
650 |
return objs |
651 |
|
652 |
+ |
653 |
def get_syslibdeps(source, target, env, for_signature): |
654 |
deps = __get_syslibdeps(target[0]) |
655 |
lib_link_prefix = env.subst('$LIBLINKPREFIX') |
656 |
@@ -220,7 +226,7 @@ |
657 |
# they're believed to represent library short names, that should be prefixed with -l |
658 |
# or the compiler-specific equivalent. I.e., 'm' becomes '-lm', but 'File("m.a") is passed |
659 |
# through whole cloth. |
660 |
- if type(d) in (str, unicode): |
661 |
+ if type(d) is str: |
662 |
result.append('%s%s%s' % (lib_link_prefix, d, lib_link_suffix)) |
663 |
else: |
664 |
result.append(d) |
665 |
@@ -382,6 +388,7 @@ |
666 |
except KeyError: |
667 |
pass |
668 |
|
669 |
+ |
670 |
def setup_conftests(conf): |
671 |
def FindSysLibDep(context, name, libs, **kwargs): |
672 |
var = "LIBDEPS_" + name.upper() + "_SYSLIBDEP" |
673 |
@@ -394,4 +401,5 @@ |
674 |
return context.Result(result) |
675 |
context.env[var] = __missing_syslib(name) |
676 |
return context.Result(result) |
677 |
+ |
678 |
conf.AddTest('FindSysLibDep', FindSysLibDep) |
679 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/__init__.py ./site_scons/mongo/__init__.py |
680 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/__init__.py 2020-10-16 18:02:58.000000000 +0200 |
681 |
+++ ./site_scons/mongo/__init__.py 2020-11-16 20:09:38.815614000 +0100 |
682 |
@@ -5,4 +5,4 @@ |
683 |
def print_build_failures(): |
684 |
from SCons.Script import GetBuildFailures |
685 |
for bf in GetBuildFailures(): |
686 |
- print "%s failed: %s" % (bf.node, bf.errstr) |
687 |
+ print("%s failed: %s" % (bf.node, bf.errstr)) |
688 |
Only in ./site_scons/mongo: __pycache__ |
689 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/generators.py ./site_scons/mongo/generators.py |
690 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/mongo/generators.py 2020-10-16 18:02:58.000000000 +0200 |
691 |
+++ ./site_scons/mongo/generators.py 2020-11-16 20:09:38.817602000 +0100 |
692 |
@@ -1,6 +1,6 @@ |
693 |
# -*- mode: python; -*- |
694 |
|
695 |
-import md5 |
696 |
+import hashlib |
697 |
|
698 |
# Default and alternative generator definitions go here. |
699 |
|
700 |
@@ -15,22 +15,69 @@ |
701 |
# want to define them. |
702 |
def default_buildinfo_environment_data(): |
703 |
return ( |
704 |
- ('distmod', '$MONGO_DISTMOD', True, True,), |
705 |
- ('distarch', '$MONGO_DISTARCH', True, True,), |
706 |
- ('cc', '$CC_VERSION', True, False,), |
707 |
- ('ccflags', '$CCFLAGS', True, False,), |
708 |
- ('cxx', '$CXX_VERSION', True, False,), |
709 |
- ('cxxflags', '$CXXFLAGS', True, False,), |
710 |
- ('linkflags', '$LINKFLAGS', True, False,), |
711 |
- ('target_arch', '$TARGET_ARCH', True, True,), |
712 |
- ('target_os', '$TARGET_OS', True, False,), |
713 |
+ ( |
714 |
+ 'distmod', |
715 |
+ '$MONGO_DISTMOD', |
716 |
+ True, |
717 |
+ True, |
718 |
+ ), |
719 |
+ ( |
720 |
+ 'distarch', |
721 |
+ '$MONGO_DISTARCH', |
722 |
+ True, |
723 |
+ True, |
724 |
+ ), |
725 |
+ ( |
726 |
+ 'cc', |
727 |
+ '$CC_VERSION', |
728 |
+ True, |
729 |
+ False, |
730 |
+ ), |
731 |
+ ( |
732 |
+ 'ccflags', |
733 |
+ '$CCFLAGS', |
734 |
+ True, |
735 |
+ False, |
736 |
+ ), |
737 |
+ ( |
738 |
+ 'cxx', |
739 |
+ '$CXX_VERSION', |
740 |
+ True, |
741 |
+ False, |
742 |
+ ), |
743 |
+ ( |
744 |
+ 'cxxflags', |
745 |
+ '$CXXFLAGS', |
746 |
+ True, |
747 |
+ False, |
748 |
+ ), |
749 |
+ ( |
750 |
+ 'linkflags', |
751 |
+ '$LINKFLAGS', |
752 |
+ True, |
753 |
+ False, |
754 |
+ ), |
755 |
+ ( |
756 |
+ 'target_arch', |
757 |
+ '$TARGET_ARCH', |
758 |
+ True, |
759 |
+ True, |
760 |
+ ), |
761 |
+ ( |
762 |
+ 'target_os', |
763 |
+ '$TARGET_OS', |
764 |
+ True, |
765 |
+ False, |
766 |
+ ), |
767 |
) |
768 |
|
769 |
+ |
770 |
# If you want buildInfo and --version to be relatively empty, set |
771 |
# MONGO_BUILDINFO_ENVIRONMENT_DATA = empty_buildinfo_environment_data() |
772 |
def empty_buildinfo_environment_data(): |
773 |
return () |
774 |
|
775 |
+ |
776 |
def default_variant_dir_generator(target, source, env, for_signature): |
777 |
|
778 |
if env.GetOption('cache') != None: |
779 |
@@ -44,11 +91,11 @@ |
780 |
|
781 |
# Hash the named options and their values, and take the first 8 characters of the hash as |
782 |
# the variant name |
783 |
- hasher = md5.md5() |
784 |
+ hasher = hashlib.md5() |
785 |
for option in variant_options: |
786 |
- hasher.update(option) |
787 |
- hasher.update(str(env.GetOption(option))) |
788 |
- variant_dir = hasher.hexdigest()[0:8] |
789 |
+ hasher.update(option.encode('utf-8')) |
790 |
+ hasher.update(str(env.GetOption(option)).encode('utf-8')) |
791 |
+ variant_dir = str(hasher.hexdigest()[0:8]) |
792 |
|
793 |
# If our option hash yields a well known hash, replace it with its name. |
794 |
known_variant_hashes = { |
795 |
Only in ./site_scons/site_tools: __pycache__ |
796 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/distsrc.py ./site_scons/site_tools/distsrc.py |
797 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/distsrc.py 2020-10-16 18:02:58.000000000 +0200 |
798 |
+++ ./site_scons/site_tools/distsrc.py 2020-11-16 20:09:38.819994000 +0100 |
799 |
@@ -20,7 +20,7 @@ |
800 |
import tarfile |
801 |
import time |
802 |
import zipfile |
803 |
-import StringIO |
804 |
+import io |
805 |
|
806 |
from distutils.spawn import find_executable |
807 |
|
808 |
@@ -28,7 +28,7 @@ |
809 |
|
810 |
class DistSrcFile: |
811 |
def __init__(self, **kwargs): |
812 |
- [ setattr(self, key, val) for (key, val) in kwargs.items() ] |
813 |
+ [ setattr(self, key, val) for (key, val) in list(kwargs.items()) ] |
814 |
|
815 |
def __str__(self): |
816 |
return self.name |
817 |
@@ -60,6 +60,7 @@ |
818 |
def close(self): |
819 |
self.archive_file.close() |
820 |
|
821 |
+ |
822 |
class DistSrcTarArchive(DistSrcArchive): |
823 |
def __iter__(self): |
824 |
file_list = self.archive_file.getnames() |
825 |
@@ -82,7 +83,7 @@ |
826 |
|
827 |
def append_file_contents(self, filename, file_contents, |
828 |
mtime=time.time(), |
829 |
- mode=0644, |
830 |
+ mode=0o644, |
831 |
uname="root", |
832 |
gname="root"): |
833 |
file_metadata = tarfile.TarInfo(name=filename) |
834 |
@@ -91,7 +92,7 @@ |
835 |
file_metadata.uname = uname |
836 |
file_metadata.gname = gname |
837 |
file_metadata.size = len(file_contents) |
838 |
- file_buf = StringIO.StringIO(file_contents) |
839 |
+ file_buf = io.BytesIO(file_contents.encode('utf-8')) |
840 |
if self.archive_mode == 'r': |
841 |
self.archive_file.close() |
842 |
self.archive_file = tarfile.open( |
843 |
@@ -105,6 +106,7 @@ |
844 |
def append_file(self, filename, localfile): |
845 |
self.archive_file.add(localfile, arcname=filename) |
846 |
|
847 |
+ |
848 |
class DistSrcZipArchive(DistSrcArchive): |
849 |
def __iter__(self): |
850 |
file_list = self.archive_file.namelist() |
851 |
@@ -119,7 +121,7 @@ |
852 |
name=key, |
853 |
size=item_data.file_size, |
854 |
mtime=time.mktime(fixed_time), |
855 |
- mode=0775 if is_dir else 0664, |
856 |
+ mode=0o775 if is_dir else 0o664, |
857 |
type=tarfile.DIRTYPE if is_dir else tarfile.REGTYPE, |
858 |
uid=0, |
859 |
gid=0, |
860 |
@@ -129,7 +131,7 @@ |
861 |
|
862 |
def append_file_contents(self, filename, file_contents, |
863 |
mtime=time.time(), |
864 |
- mode=0644, |
865 |
+ mode=0o644, |
866 |
uname="root", |
867 |
gname="root"): |
868 |
self.archive_file.writestr(filename, file_contents) |
869 |
@@ -139,7 +141,7 @@ |
870 |
|
871 |
def build_error_action(msg): |
872 |
def error_stub(target=None, source=None, env=None): |
873 |
- print msg |
874 |
+ print(msg) |
875 |
env.Exit(1) |
876 |
return [ error_stub ] |
877 |
|
878 |
@@ -162,7 +164,7 @@ |
879 |
|
880 |
target_ext = str(target[0])[-3:] |
881 |
if not target_ext in [ 'zip', 'tar' ]: |
882 |
- print "Invalid file format for distsrc. Must be tar or zip file" |
883 |
+ print("Invalid file format for distsrc. Must be tar or zip file") |
884 |
env.Exit(1) |
885 |
|
886 |
git_cmd = "\"%s\" archive --format %s --output %s --prefix ${MONGO_DIST_SRC_PREFIX} HEAD" % ( |
887 |
@@ -173,14 +175,14 @@ |
888 |
SCons.Action.Action(run_distsrc_callbacks, "Running distsrc callbacks for $TARGET") |
889 |
] |
890 |
|
891 |
+ |
892 |
def add_callback(env, fn): |
893 |
__distsrc_callbacks.append(fn) |
894 |
|
895 |
+ |
896 |
def generate(env, **kwargs): |
897 |
env.AddMethod(add_callback, 'AddDistSrcCallback') |
898 |
- env['BUILDERS']['__DISTSRC'] = SCons.Builder.Builder( |
899 |
- generator=distsrc_action_generator, |
900 |
- ) |
901 |
+ env['BUILDERS']['__DISTSRC'] = SCons.Builder.Builder(generator=distsrc_action_generator, ) |
902 |
|
903 |
def DistSrc(env, target): |
904 |
result = env.__DISTSRC(target=target, source=[]) |
905 |
@@ -189,6 +191,7 @@ |
906 |
return result |
907 |
|
908 |
env.AddMethod(DistSrc, 'DistSrc') |
909 |
+ |
910 |
|
911 |
def exists(env): |
912 |
return True |
913 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/idl_tool.py ./site_scons/site_tools/idl_tool.py |
914 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/idl_tool.py 2020-10-16 18:02:58.000000000 +0200 |
915 |
+++ ./site_scons/site_tools/idl_tool.py 2020-11-16 20:09:38.821062000 +0100 |
916 |
@@ -21,6 +21,7 @@ |
917 |
|
918 |
import SCons |
919 |
|
920 |
+ |
921 |
def idlc_emitter(target, source, env): |
922 |
"""For each input IDL file, the tool produces a .cpp and .h file.""" |
923 |
first_source = str(source[0]) |
924 |
@@ -43,7 +44,7 @@ |
925 |
def idl_scanner(node, env, path): |
926 |
# Use the import scanner mode of the IDL compiler to file imported files |
927 |
cmd = [sys.executable, "buildscripts/idl/idlc.py", '--include','src', str(node), '--write-dependencies'] |
928 |
- deps_str = subprocess.check_output(cmd) |
929 |
+ deps_str = subprocess.check_output(cmd).decode('utf-8') |
930 |
|
931 |
deps_list = deps_str.splitlines() |
932 |
|
933 |
@@ -57,19 +58,14 @@ |
934 |
idl_scanner = SCons.Scanner.Scanner(function=idl_scanner, skeys=['.idl']) |
935 |
|
936 |
# TODO: create a scanner for imports when imports are implemented |
937 |
-IDLCBuilder = SCons.Builder.Builder( |
938 |
- action=IDLCAction, |
939 |
- emitter=idlc_emitter, |
940 |
- srcsuffx=".idl", |
941 |
- suffix=".cpp", |
942 |
- source_scanner = idl_scanner |
943 |
- ) |
944 |
+IDLCBuilder = SCons.Builder.Builder(action=IDLCAction, emitter=idlc_emitter, srcsuffx=".idl", |
945 |
+ suffix=".cpp", source_scanner=idl_scanner) |
946 |
|
947 |
|
948 |
def generate(env): |
949 |
bld = IDLCBuilder |
950 |
|
951 |
- env.Append(SCANNERS = idl_scanner) |
952 |
+ env.Append(SCANNERS=idl_scanner) |
953 |
|
954 |
env['BUILDERS']['Idlc'] = bld |
955 |
|
956 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/jstoh.py ./site_scons/site_tools/jstoh.py |
957 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/jstoh.py 2020-10-16 18:02:58.000000000 +0200 |
958 |
+++ ./site_scons/site_tools/jstoh.py 2020-11-16 20:30:59.809428000 +0100 |
959 |
@@ -39,7 +39,7 @@ |
960 |
|
961 |
text = '\n'.join(h) |
962 |
|
963 |
- with open(outFile, 'wb') as out: |
964 |
+ with open(outFile, 'w') as out: |
965 |
try: |
966 |
out.write(text) |
967 |
finally: |
968 |
@@ -48,7 +48,7 @@ |
969 |
|
970 |
if __name__ == "__main__": |
971 |
if len(sys.argv) < 3: |
972 |
- print "Must specify [target] [source] " |
973 |
+ print("Must specify [target] [source] ") |
974 |
sys.exit(1) |
975 |
|
976 |
jsToHeader(sys.argv[1], sys.argv[2:]) |
977 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/mongo_benchmark.py ./site_scons/site_tools/mongo_benchmark.py |
978 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/mongo_benchmark.py 2020-10-16 18:02:58.000000000 +0200 |
979 |
+++ ./site_scons/site_tools/mongo_benchmark.py 2020-11-16 20:09:38.823525000 +0100 |
980 |
@@ -11,10 +11,10 @@ |
981 |
env.Alias('$BENCHMARK_ALIAS', test) |
982 |
|
983 |
def benchmark_list_builder_action(env, target, source): |
984 |
- ofile = open(str(target[0]), 'wb') |
985 |
+ ofile = open(str(target[0]), 'w') |
986 |
try: |
987 |
for s in _benchmarks: |
988 |
- print '\t' + str(s) |
989 |
+ print('\t' + str(s)) |
990 |
ofile.write('%s\n' % s) |
991 |
finally: |
992 |
ofile.close() |
993 |
@@ -40,9 +40,10 @@ |
994 |
bmEnv.Install("#/build/benchmark/", result[0]) |
995 |
return result |
996 |
|
997 |
+ |
998 |
def generate(env): |
999 |
env.Command('$BENCHMARK_LIST', env.Value(_benchmarks), |
1000 |
- Action(benchmark_list_builder_action, "Generating $TARGET")) |
1001 |
+ Action(benchmark_list_builder_action, "Generating $TARGET")) |
1002 |
env.AddMethod(register_benchmark, 'RegisterBenchmark') |
1003 |
env.AddMethod(build_benchmark, 'Benchmark') |
1004 |
env.Alias('$BENCHMARK_ALIAS', '$BENCHMARK_LIST') |
1005 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/mongo_integrationtest.py ./site_scons/site_tools/mongo_integrationtest.py |
1006 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/mongo_integrationtest.py 2020-10-16 18:02:58.000000000 +0200 |
1007 |
+++ ./site_scons/site_tools/mongo_integrationtest.py 2020-11-16 20:09:38.824569000 +0100 |
1008 |
@@ -12,10 +12,10 @@ |
1009 |
env.Alias('$INTEGRATION_TEST_ALIAS', installed_test) |
1010 |
|
1011 |
def integration_test_list_builder_action(env, target, source): |
1012 |
- ofile = open(str(target[0]), 'wb') |
1013 |
+ ofile = open(str(target[0]), 'w') |
1014 |
try: |
1015 |
for s in _integration_tests: |
1016 |
- print '\t' + str(s) |
1017 |
+ print('\t' + str(s)) |
1018 |
ofile.write('%s\n' % s) |
1019 |
finally: |
1020 |
ofile.close() |
1021 |
@@ -31,9 +31,10 @@ |
1022 |
env.RegisterIntegrationTest(result[0]) |
1023 |
return result |
1024 |
|
1025 |
+ |
1026 |
def generate(env): |
1027 |
env.Command('$INTEGRATION_TEST_LIST', env.Value(_integration_tests), |
1028 |
- Action(integration_test_list_builder_action, "Generating $TARGET")) |
1029 |
+ Action(integration_test_list_builder_action, "Generating $TARGET")) |
1030 |
env.AddMethod(register_integration_test, 'RegisterIntegrationTest') |
1031 |
env.AddMethod(build_cpp_integration_test, 'CppIntegrationTest') |
1032 |
env.Alias('$INTEGRATION_TEST_ALIAS', '$INTEGRATION_TEST_LIST') |
1033 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/mongo_unittest.py ./site_scons/site_tools/mongo_unittest.py |
1034 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/mongo_unittest.py 2020-10-16 18:02:58.000000000 +0200 |
1035 |
+++ ./site_scons/site_tools/mongo_unittest.py 2020-11-16 20:09:38.825577000 +0100 |
1036 |
@@ -11,10 +11,10 @@ |
1037 |
env.Alias('$UNITTEST_ALIAS', test) |
1038 |
|
1039 |
def unit_test_list_builder_action(env, target, source): |
1040 |
- ofile = open(str(target[0]), 'wb') |
1041 |
+ ofile = open(str(target[0]), 'w') |
1042 |
try: |
1043 |
for s in _unittests: |
1044 |
- print '\t' + str(s) |
1045 |
+ print('\t' + str(s)) |
1046 |
ofile.write('%s\n' % s) |
1047 |
finally: |
1048 |
ofile.close() |
1049 |
@@ -33,9 +33,10 @@ |
1050 |
env.Install("#/build/unittests/", result[0]) |
1051 |
return result |
1052 |
|
1053 |
+ |
1054 |
def generate(env): |
1055 |
env.Command('$UNITTEST_LIST', env.Value(_unittests), |
1056 |
- Action(unit_test_list_builder_action, "Generating $TARGET")) |
1057 |
+ Action(unit_test_list_builder_action, "Generating $TARGET")) |
1058 |
env.AddMethod(register_unit_test, 'RegisterUnitTest') |
1059 |
env.AddMethod(build_cpp_unit_test, 'CppUnitTest') |
1060 |
env.Alias('$UNITTEST_ALIAS', '$UNITTEST_LIST') |
1061 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/split_dwarf.py ./site_scons/site_tools/split_dwarf.py |
1062 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/split_dwarf.py 2020-10-16 18:02:58.000000000 +0200 |
1063 |
+++ ./site_scons/site_tools/split_dwarf.py 2020-11-16 20:09:38.826716000 +0100 |
1064 |
@@ -26,6 +26,7 @@ |
1065 |
if SCons.Util.case_sensitive_suffixes('.c', '.C'): |
1066 |
_CXXSuffixes.append('.C') |
1067 |
|
1068 |
+ |
1069 |
def _dwo_emitter(target, source, env): |
1070 |
new_targets = [] |
1071 |
for t in target: |
1072 |
@@ -40,6 +41,7 @@ |
1073 |
targets = target + new_targets |
1074 |
return (targets, source) |
1075 |
|
1076 |
+ |
1077 |
def generate(env): |
1078 |
suffixes = [] |
1079 |
if _splitDwarfFlag in env['CCFLAGS']: |
1080 |
@@ -52,7 +54,7 @@ |
1081 |
|
1082 |
for object_builder in SCons.Tool.createObjBuilders(env): |
1083 |
emitterdict = object_builder.builder.emitter |
1084 |
- for suffix in emitterdict.iterkeys(): |
1085 |
+ for suffix in emitterdict.keys(): |
1086 |
if not suffix in suffixes: |
1087 |
continue |
1088 |
base = emitterdict[suffix] |
1089 |
@@ -60,6 +62,7 @@ |
1090 |
base, |
1091 |
_dwo_emitter, |
1092 |
]) |
1093 |
+ |
1094 |
|
1095 |
def exists(env): |
1096 |
return any(_splitDwarfFlag in env[f] for f in ['CCFLAGS', 'CFLAGS', 'CXXFLAGS']) |
1097 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/thin_archive.py ./site_scons/site_tools/thin_archive.py |
1098 |
--- /data/ports-build/mongodb-src-r4.0.21/site_scons/site_tools/thin_archive.py 2020-10-16 18:02:58.000000000 +0200 |
1099 |
+++ ./site_scons/site_tools/thin_archive.py 2020-11-16 20:09:38.827857000 +0100 |
1100 |
@@ -17,6 +17,7 @@ |
1101 |
import re |
1102 |
import subprocess |
1103 |
|
1104 |
+ |
1105 |
def exists(env): |
1106 |
if not 'AR' in env: |
1107 |
return False |
1108 |
@@ -30,10 +31,9 @@ |
1109 |
if not "rc" in env['ARFLAGS']: |
1110 |
return False |
1111 |
|
1112 |
- pipe = SCons.Action._subproc(env, SCons.Util.CLVar(ar) + ['--version'], |
1113 |
- stdin = 'devnull', |
1114 |
- stderr = 'devnull', |
1115 |
- stdout = subprocess.PIPE) |
1116 |
+ pipe = SCons.Action._subproc(env, |
1117 |
+ SCons.Util.CLVar(ar) + ['--version'], stdin='devnull', |
1118 |
+ stderr='devnull', stdout=subprocess.PIPE) |
1119 |
if pipe.wait() != 0: |
1120 |
return False |
1121 |
|
1122 |
@@ -41,7 +41,7 @@ |
1123 |
for line in pipe.stdout: |
1124 |
if found: |
1125 |
continue # consume all data |
1126 |
- found = re.search(r'^GNU ar|^LLVM', line) |
1127 |
+ found = re.search(r'^GNU ar|^LLVM', line.decode('utf-8')) |
1128 |
|
1129 |
return bool(found) |
1130 |
|
1131 |
@@ -56,6 +56,7 @@ |
1132 |
new_emitter = SCons.Builder.ListEmitter([base_emitter, new_emitter]) |
1133 |
builder.emitter = new_emitter |
1134 |
|
1135 |
+ |
1136 |
def _add_scanner(builder): |
1137 |
old_scanner = builder.target_scanner |
1138 |
path_function = old_scanner.path_function |
1139 |
@@ -69,13 +70,16 @@ |
1140 |
new_results.extend(base.children()) |
1141 |
return new_results |
1142 |
|
1143 |
- builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner, path_function=path_function) |
1144 |
+ builder.target_scanner = SCons.Scanner.Scanner(function=new_scanner, |
1145 |
+ path_function=path_function) |
1146 |
|
1147 |
+ |
1148 |
def generate(env): |
1149 |
if not exists(env): |
1150 |
return |
1151 |
|
1152 |
- env['ARFLAGS'] = SCons.Util.CLVar([arflag if arflag != "rc" else "rcsTD" for arflag in env['ARFLAGS']]) |
1153 |
+ env['ARFLAGS'] = SCons.Util.CLVar( |
1154 |
+ [arflag if arflag != "rc" else "rcsTD" for arflag in env['ARFLAGS']]) |
1155 |
|
1156 |
def noop_action(env, target, source): |
1157 |
pass |
1158 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/src/mongo/SConscript ./src/mongo/SConscript |
1159 |
--- /data/ports-build/mongodb-src-r4.0.21/src/mongo/SConscript 2020-10-16 18:02:58.000000000 +0200 |
1160 |
+++ ./src/mongo/SConscript 2020-11-16 20:09:38.834118000 +0100 |
1161 |
@@ -155,9 +155,9 @@ |
1162 |
|
1163 |
# On windows, we need to escape the backslashes in the command-line |
1164 |
# so that windows paths look okay. |
1165 |
-cmd_line = " ".join(sys.argv).encode('string-escape') |
1166 |
+cmd_line = " ".join(sys.argv).encode('unicode_escape').decode() |
1167 |
if env.TargetOSIs('windows'): |
1168 |
- cmd_line = cmd_line.replace('\\', r'\\') |
1169 |
+ cmd_line = cmd_line.replace('\\', r'\\') |
1170 |
|
1171 |
module_list = '{ %s }' % ', '.join([ '"{0}"'.format(x) for x in env['MONGO_MODULES'] ]) |
1172 |
|
1173 |
@@ -662,7 +662,7 @@ |
1174 |
|
1175 |
# If no module has introduced a file named LICENSE-Enterprise.txt then this |
1176 |
# is a Community build, so inject the Community license |
1177 |
-if sum(itertools.imap(lambda x: x.name == "LICENSE-Enterprise.txt", env['MODULE_BANNERS'])) == 0: |
1178 |
+if sum(map(lambda x: x.name == "LICENSE-Enterprise.txt", env['MODULE_BANNERS'])) == 0: |
1179 |
env.Append(MODULE_BANNERS = [distsrc.File('LICENSE-Community.txt')]) |
1180 |
|
1181 |
# All module banners get staged to the top level of the tarfile, so we |
1182 |
@@ -681,7 +681,7 @@ |
1183 |
# Allow modules to map original file name directories to subdirectories |
1184 |
# within the archive (e.g. { "src/mongo/db/modules/enterprise/docs": "snmp"}) |
1185 |
archive_addition_transforms = [] |
1186 |
-for full_dir, archive_dir in env["ARCHIVE_ADDITION_DIR_MAP"].items(): |
1187 |
+for full_dir, archive_dir in list(env["ARCHIVE_ADDITION_DIR_MAP"].items()): |
1188 |
archive_addition_transforms.append("--transform \"%s=$SERVER_DIST_BASENAME/%s\"" % |
1189 |
(full_dir, archive_dir)) |
1190 |
|
1191 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/src/mongo/base/generate_error_codes.py ./src/mongo/base/generate_error_codes.py |
1192 |
--- /data/ports-build/mongodb-src-r4.0.21/src/mongo/base/generate_error_codes.py 2020-10-16 18:02:58.000000000 +0200 |
1193 |
+++ ./src/mongo/base/generate_error_codes.py 2020-11-16 20:09:38.838467000 +0100 |
1194 |
@@ -26,7 +26,6 @@ |
1195 |
# delete this exception statement from your version. If you delete this |
1196 |
# exception statement from all source files in the program, then also delete |
1197 |
# it in the license file. |
1198 |
- |
1199 |
"""Generate error_codes.{h,cpp} from error_codes.err. |
1200 |
|
1201 |
Format of error_codes.err: |
1202 |
@@ -68,11 +67,13 @@ |
1203 |
self.extra = extra |
1204 |
self.categories = [] |
1205 |
|
1206 |
+ |
1207 |
class ErrorClass: |
1208 |
def __init__(self, name, codes): |
1209 |
self.name = name |
1210 |
self.codes = codes |
1211 |
|
1212 |
+ |
1213 |
def main(argv): |
1214 |
# Parse and validate argv. |
1215 |
if len(sys.argv) < 2: |
1216 |
@@ -99,7 +100,7 @@ |
1217 |
categories=error_classes, |
1218 |
) |
1219 |
|
1220 |
- with open(output, 'wb') as outfile: |
1221 |
+ with open(output, 'w') as outfile: |
1222 |
outfile.write(text) |
1223 |
|
1224 |
def die(message=None): |
1225 |
@@ -131,6 +132,7 @@ |
1226 |
if failed: |
1227 |
die() |
1228 |
|
1229 |
+ |
1230 |
def has_duplicate_error_codes(error_codes): |
1231 |
sorted_by_name = sorted(error_codes, key=lambda x: x.name) |
1232 |
sorted_by_code = sorted(error_codes, key=lambda x: x.code) |
1233 |
@@ -139,21 +141,22 @@ |
1234 |
prev = sorted_by_name[0] |
1235 |
for curr in sorted_by_name[1:]: |
1236 |
if curr.name == prev.name: |
1237 |
- sys.stdout.write('Duplicate name %s with codes %s and %s\n' |
1238 |
- % (curr.name, curr.code, prev.code)) |
1239 |
+ sys.stdout.write( |
1240 |
+ 'Duplicate name %s with codes %s and %s\n' % (curr.name, curr.code, prev.code)) |
1241 |
failed = True |
1242 |
prev = curr |
1243 |
|
1244 |
prev = sorted_by_code[0] |
1245 |
for curr in sorted_by_code[1:]: |
1246 |
if curr.code == prev.code: |
1247 |
- sys.stdout.write('Duplicate code %s with names %s and %s\n' |
1248 |
- % (curr.code, curr.name, prev.name)) |
1249 |
+ sys.stdout.write( |
1250 |
+ 'Duplicate code %s with names %s and %s\n' % (curr.code, curr.name, prev.name)) |
1251 |
failed = True |
1252 |
prev = curr |
1253 |
|
1254 |
return failed |
1255 |
|
1256 |
+ |
1257 |
def has_duplicate_error_classes(error_classes): |
1258 |
names = sorted(ec.name for ec in error_classes) |
1259 |
|
1260 |
@@ -166,6 +169,7 @@ |
1261 |
prev_name = name |
1262 |
return failed |
1263 |
|
1264 |
+ |
1265 |
def has_missing_error_codes(error_codes, error_classes): |
1266 |
code_names = dict((ec.name, ec) for ec in error_codes) |
1267 |
failed = False |
1268 |
@@ -178,6 +182,7 @@ |
1269 |
failed = True |
1270 |
|
1271 |
return failed |
1272 |
+ |
1273 |
|
1274 |
if __name__ == '__main__': |
1275 |
main(sys.argv) |
1276 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/src/mongo/db/auth/generate_action_types.py ./src/mongo/db/auth/generate_action_types.py |
1277 |
--- /data/ports-build/mongodb-src-r4.0.21/src/mongo/db/auth/generate_action_types.py 2020-10-16 18:02:58.000000000 +0200 |
1278 |
+++ ./src/mongo/db/auth/generate_action_types.py 2020-11-16 20:09:38.846414000 +0100 |
1279 |
@@ -26,7 +26,6 @@ |
1280 |
# delete this exception statement from your version. If you delete this |
1281 |
# exception statement from all source files in the program, then also delete |
1282 |
# it in the license file. |
1283 |
- |
1284 |
"""Generate action_type.{h,cpp} |
1285 |
|
1286 |
Usage: |
1287 |
@@ -35,7 +34,6 @@ |
1288 |
|
1289 |
import sys |
1290 |
|
1291 |
- |
1292 |
headerFileTemplate = """// AUTO-GENERATED FILE DO NOT EDIT |
1293 |
// See src/mongo/db/auth/generate_action_types.py |
1294 |
/** |
1295 |
@@ -194,14 +192,14 @@ |
1296 |
} // namespace mongo |
1297 |
""" |
1298 |
|
1299 |
+ |
1300 |
def writeSourceFile(actionTypes, sourceOutputFile): |
1301 |
actionTypeConstants = "" |
1302 |
fromStringIfStatements = "" |
1303 |
toStringCaseStatements = "" |
1304 |
for actionType in actionTypes: |
1305 |
actionTypeConstants += (" const ActionType ActionType::%(actionType)s" |
1306 |
- "(%(actionType)sValue);\n" % |
1307 |
- dict(actionType=actionType)) |
1308 |
+ "(%(actionType)sValue);\n" % dict(actionType=actionType)) |
1309 |
fromStringIfStatements += """ if (action == "%(actionType)s") { |
1310 |
*result = %(actionType)s; |
1311 |
return Status::OK(); |
1312 |
@@ -215,6 +213,7 @@ |
1313 |
|
1314 |
pass |
1315 |
|
1316 |
+ |
1317 |
def writeHeaderFile(actionTypes, headerOutputFile): |
1318 |
actionTypeConstants = "" |
1319 |
actionTypeIdentifiers = "" |
1320 |
@@ -225,6 +224,7 @@ |
1321 |
actionTypeIdentifiers=actionTypeIdentifiers) |
1322 |
headerOutputFile.write(formattedHeaderFile) |
1323 |
|
1324 |
+ |
1325 |
def hasDuplicateActionTypes(actionTypes): |
1326 |
sortedActionTypes = sorted(actionTypes) |
1327 |
|
1328 |
@@ -232,7 +232,7 @@ |
1329 |
prevActionType = sortedActionTypes[0] |
1330 |
for actionType in sortedActionTypes[1:]: |
1331 |
if actionType == prevActionType: |
1332 |
- print 'Duplicate actionType %s\n' % actionType |
1333 |
+ print('Duplicate actionType %s\n' % actionType) |
1334 |
didFail = True |
1335 |
prevActionType = actionType |
1336 |
|
1337 |
@@ -245,7 +245,7 @@ |
1338 |
|
1339 |
if __name__ == "__main__": |
1340 |
if len(sys.argv) != 4: |
1341 |
- print "Usage: generate_action_types.py <path to action_types.txt> <header file path> <source file path>" |
1342 |
+ print("Usage: generate_action_types.py <path to action_types.txt> <header file path> <source file path>") |
1343 |
sys.exit(-1) |
1344 |
|
1345 |
actionTypes = parseActionTypesFromFile(sys.argv[1]) |
1346 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/src/mongo/db/fts/generate_stop_words.py ./src/mongo/db/fts/generate_stop_words.py |
1347 |
--- /data/ports-build/mongodb-src-r4.0.21/src/mongo/db/fts/generate_stop_words.py 2020-10-16 18:02:58.000000000 +0200 |
1348 |
+++ ./src/mongo/db/fts/generate_stop_words.py 2020-11-16 20:09:38.851050000 +0100 |
1349 |
@@ -1,7 +1,7 @@ |
1350 |
import sys |
1351 |
|
1352 |
def generate( header, source, language_files ): |
1353 |
- out = open( header, "wb" ) |
1354 |
+ out = open( header, "w" ) |
1355 |
out.write( """ |
1356 |
#pragma once |
1357 |
#include <set> |
1358 |
@@ -18,8 +18,8 @@ |
1359 |
|
1360 |
|
1361 |
|
1362 |
- out = open( source, "wb" ) |
1363 |
- out.write( '#include "%s"' % header.rpartition( "/" )[2].rpartition( "\\" )[2] ) |
1364 |
+ out = open( source, "w", encoding='utf-8') |
1365 |
+ out.write( '#include "{}"'.format(header.rpartition( "/" )[2].rpartition( "\\" )[2]) ) |
1366 |
out.write( """ |
1367 |
namespace mongo { |
1368 |
namespace fts { |
1369 |
@@ -35,12 +35,13 @@ |
1370 |
out.write( ' {\n' ) |
1371 |
out.write( ' const char* const words[] = {\n' ) |
1372 |
for word in open( l_file, "rb" ): |
1373 |
- out.write( ' "%s",\n' % word.strip() ) |
1374 |
+ out.write( ' "%s",\n' % word.decode('utf-8').strip() ) |
1375 |
out.write( ' };\n' ) |
1376 |
out.write( ' const size_t wordcnt = sizeof(words) / sizeof(words[0]);\n' ) |
1377 |
out.write( ' std::set< std::string >& l = (*m)["%s"];\n' % l ) |
1378 |
out.write( ' l.insert(&words[0], &words[wordcnt]);\n' ) |
1379 |
out.write( ' }\n' ) |
1380 |
+ |
1381 |
out.write( """ |
1382 |
} |
1383 |
} // namespace fts |
1384 |
diff -ru /data/ports-build/mongodb-src-r4.0.21/src/mongo/util/generate_icu_init_cpp.py ./src/mongo/util/generate_icu_init_cpp.py |
1385 |
--- /data/ports-build/mongodb-src-r4.0.21/src/mongo/util/generate_icu_init_cpp.py 2020-10-16 18:02:58.000000000 +0200 |
1386 |
+++ ./src/mongo/util/generate_icu_init_cpp.py 2020-11-16 21:01:59.537644000 +0100 |
1387 |
@@ -112,8 +112,8 @@ |
1388 |
''' |
1389 |
decimal_encoded_data = '' |
1390 |
with open(data_file_path, 'rb') as data_file: |
1391 |
- decimal_encoded_data = ','.join([str(ord(byte)) for byte in data_file.read()]) |
1392 |
- with open(cpp_file_path, 'wb') as cpp_file: |
1393 |
+ decimal_encoded_data = ','.join([str(byte) for byte in data_file.read()]) |
1394 |
+ with open(cpp_file_path, 'w') as cpp_file: |
1395 |
cpp_file.write(source_template % dict(decimal_encoded_data=decimal_encoded_data)) |
1396 |
|
1397 |
if __name__ == '__main__': |