# HG changeset patch # User Augie Fackler # Date 1555522878 14400 # Node ID 4a8d9ed864754837a185a642170cde24392f9abf # Parent d1c33b2442a7c3e9ba0609ae76ae178e723d2fc4# Parent 29569f2db9293fca58c5e34fe15f4833aeb40ed0 merge: default into stable for release candidate diff -r d1c33b2442a7 -r 4a8d9ed86475 Makefile --- a/Makefile Tue Mar 19 09:23:35 2019 -0400 +++ b/Makefile Wed Apr 17 13:41:18 2019 -0400 @@ -5,7 +5,7 @@ # % make PREFIX=/opt/ install export PREFIX=/usr/local -PYTHON=python +PYTHON?=python $(eval HGROOT := $(shell pwd)) HGPYTHONS ?= $(HGROOT)/build/pythons PURE= diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/README.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/README.rst Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,127 @@ +==================== +Mercurial Automation +==================== + +This directory contains code and utilities for building and testing Mercurial +on remote machines. + +The ``automation.py`` Script +============================ + +``automation.py`` is an executable Python script (requires Python 3.5+) +that serves as a driver to common automation tasks. + +When executed, the script will *bootstrap* a virtualenv in +``/build/venv-automation`` then re-execute itself using +that virtualenv. So there is no need for the caller to have a virtualenv +explicitly activated. This virtualenv will be populated with various +dependencies (as defined by the ``requirements.txt`` file). + +To see what you can do with this script, simply run it:: + + $ ./automation.py + +Local State +=========== + +By default, local state required to interact with remote servers is stored +in the ``~/.hgautomation`` directory. + +We attempt to limit persistent state to this directory. Even when +performing tasks that may have side-effects, we try to limit those +side-effects so they don't impact the local system. e.g. when we SSH +into a remote machine, we create a temporary directory for the SSH +config so the user's known hosts file isn't updated. + +AWS Integration +=============== + +Various automation tasks integrate with AWS to provide access to +resources such as EC2 instances for generic compute. + +This obviously requires an AWS account and credentials to work. + +We use the ``boto3`` library for interacting with AWS APIs. We do not employ +any special functionality for telling ``boto3`` where to find AWS credentials. See +https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html +for how ``boto3`` works. Once you have configured your environment such +that ``boto3`` can find credentials, interaction with AWS should *just work*. + +.. hint:: + + Typically you have a ``~/.aws/credentials`` file containing AWS + credentials. If you manage multiple credentials, you can override which + *profile* to use at run-time by setting the ``AWS_PROFILE`` environment + variable. + +Resource Management +------------------- + +Depending on the task being performed, various AWS services will be accessed. +This of course requires AWS credentials with permissions to access these +services. + +The following AWS services can be accessed by automation tasks: + +* EC2 +* IAM +* Simple Systems Manager (SSM) + +Various resources will also be created as part of performing various tasks. +This also requires various permissions. 
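+
+As a quick, optional sanity check (``automation.py`` does not do this for
+you, and the exact command below is illustrative only), you can confirm that
+``boto3`` is able to discover credentials before invoking any tasks::
+
+  $ python3 -c "import boto3; print(boto3.session.Session().get_credentials())"
+
+This prints a credentials object if discovery succeeded, or ``None`` if it
+did not.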
+ +The following AWS resources can be created by automation tasks: + +* EC2 key pairs +* EC2 security groups +* EC2 instances +* IAM roles and instance profiles +* SSM command invocations + +When possible, we prefix resource names with ``hg-`` so they can easily +be identified as belonging to Mercurial. + +.. important:: + + We currently assume that AWS accounts utilized by *us* are single + tenancy. Attempts to have discrete users of ``automation.py`` (including + sharing credentials across machines) using the same AWS account can result + in them interfering with each other and things breaking. + +Cost of Operation +----------------- + +``automation.py`` tries to be frugal with regards to utilization of remote +resources. Persistent remote resources are minimized in order to keep costs +in check. For example, EC2 instances are often ephemeral and only live as long +as the operation being performed. + +Under normal operation, recurring costs are limited to: + +* Storage costs for AMI / EBS snapshots. This should be just a few pennies + per month. + +When running EC2 instances, you'll be billed accordingly. By default, we +use *small* instances, like ``t3.medium``. This instance type costs ~$0.07 per +hour. + +.. note:: + + When running Windows EC2 instances, AWS bills at the full hourly cost, even + if the instance doesn't run for a full hour (per-second billing doesn't + apply to Windows AMIs). + +Managing Remote Resources +------------------------- + +Occassionally, there may be an error purging a temporary resource. Or you +may wish to forcefully purge remote state. Commands can be invoked to manually +purge remote resources. + +To terminate all EC2 instances that we manage:: + + $ automation.py terminate-ec2-instances + +To purge all EC2 resources that we manage:: + + $ automation.py purge-ec2-resources diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/automation.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/automation.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 +# +# automation.py - Perform tasks on remote machines +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import os +import pathlib +import subprocess +import sys +import venv + + +HERE = pathlib.Path(os.path.abspath(__file__)).parent +REQUIREMENTS_TXT = HERE / 'requirements.txt' +SOURCE_DIR = HERE.parent.parent +VENV = SOURCE_DIR / 'build' / 'venv-automation' + + +def bootstrap(): + venv_created = not VENV.exists() + + VENV.parent.mkdir(exist_ok=True) + + venv.create(VENV, with_pip=True) + + if os.name == 'nt': + venv_bin = VENV / 'Scripts' + pip = venv_bin / 'pip.exe' + python = venv_bin / 'python.exe' + else: + venv_bin = VENV / 'bin' + pip = venv_bin / 'pip' + python = venv_bin / 'python' + + args = [str(pip), 'install', '-r', str(REQUIREMENTS_TXT), + '--disable-pip-version-check'] + + if not venv_created: + args.append('-q') + + subprocess.run(args, check=True) + + os.environ['HGAUTOMATION_BOOTSTRAPPED'] = '1' + os.environ['PATH'] = '%s%s%s' % ( + venv_bin, os.pathsep, os.environ['PATH']) + + subprocess.run([str(python), __file__] + sys.argv[1:], check=True) + + +def run(): + import hgautomation.cli as cli + + # Need to strip off main Python executable. 
+ cli.main() + + +if __name__ == '__main__': + try: + if 'HGAUTOMATION_BOOTSTRAPPED' not in os.environ: + bootstrap() + else: + run() + except subprocess.CalledProcessError as e: + sys.exit(e.returncode) + except KeyboardInterrupt: + sys.exit(1) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/hgautomation/__init__.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/hgautomation/__init__.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,59 @@ +# __init__.py - High-level automation interfaces +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import pathlib +import secrets + +from .aws import ( + AWSConnection, +) + + +class HGAutomation: + """High-level interface for Mercurial automation. + + Holds global state, provides access to other primitives, etc. + """ + + def __init__(self, state_path: pathlib.Path): + self.state_path = state_path + + state_path.mkdir(exist_ok=True) + + def default_password(self): + """Obtain the default password to use for remote machines. + + A new password will be generated if one is not stored. + """ + p = self.state_path / 'default-password' + + try: + with p.open('r', encoding='ascii') as fh: + data = fh.read().strip() + + if data: + return data + + except FileNotFoundError: + pass + + password = secrets.token_urlsafe(24) + + with p.open('w', encoding='ascii') as fh: + fh.write(password) + fh.write('\n') + + p.chmod(0o0600) + + return password + + def aws_connection(self, region: str): + """Obtain an AWSConnection instance bound to a specific region.""" + + return AWSConnection(self, region) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/hgautomation/aws.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/hgautomation/aws.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,879 @@ +# aws.py - Automation code for Amazon Web Services +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. 
+ +import contextlib +import copy +import hashlib +import json +import os +import pathlib +import subprocess +import time + +import boto3 +import botocore.exceptions + +from .winrm import ( + run_powershell, + wait_for_winrm, +) + + +SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent + +INSTALL_WINDOWS_DEPENDENCIES = (SOURCE_ROOT / 'contrib' / + 'install-windows-dependencies.ps1') + + +KEY_PAIRS = { + 'automation', +} + + +SECURITY_GROUPS = { + 'windows-dev-1': { + 'description': 'Mercurial Windows instances that perform build automation', + 'ingress': [ + { + 'FromPort': 22, + 'ToPort': 22, + 'IpProtocol': 'tcp', + 'IpRanges': [ + { + 'CidrIp': '0.0.0.0/0', + 'Description': 'SSH from entire Internet', + }, + ], + }, + { + 'FromPort': 3389, + 'ToPort': 3389, + 'IpProtocol': 'tcp', + 'IpRanges': [ + { + 'CidrIp': '0.0.0.0/0', + 'Description': 'RDP from entire Internet', + }, + ], + + }, + { + 'FromPort': 5985, + 'ToPort': 5986, + 'IpProtocol': 'tcp', + 'IpRanges': [ + { + 'CidrIp': '0.0.0.0/0', + 'Description': 'PowerShell Remoting (Windows Remote Management)', + }, + ], + } + ], + }, +} + + +IAM_ROLES = { + 'ephemeral-ec2-role-1': { + 'description': 'Mercurial temporary EC2 instances', + 'policy_arns': [ + 'arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforSSM', + ], + }, +} + + +ASSUME_ROLE_POLICY_DOCUMENT = ''' +{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": { + "Service": "ec2.amazonaws.com" + }, + "Action": "sts:AssumeRole" + } + ] +} +'''.strip() + + +IAM_INSTANCE_PROFILES = { + 'ephemeral-ec2-1': { + 'roles': [ + 'ephemeral-ec2-role-1', + ], + } +} + + +# User Data for Windows EC2 instance. Mainly used to set the password +# and configure WinRM. +# Inspired by the User Data script used by Packer +# (from https://www.packer.io/intro/getting-started/build-image.html). +WINDOWS_USER_DATA = r''' + + +# TODO enable this once we figure out what is failing. 
+#$ErrorActionPreference = "stop" + +# Set administrator password +net user Administrator "%s" +wmic useraccount where "name='Administrator'" set PasswordExpires=FALSE + +# First, make sure WinRM can't be connected to +netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new enable=yes action=block + +# Delete any existing WinRM listeners +winrm delete winrm/config/listener?Address=*+Transport=HTTP 2>$Null +winrm delete winrm/config/listener?Address=*+Transport=HTTPS 2>$Null + +# Create a new WinRM listener and configure +winrm create winrm/config/listener?Address=*+Transport=HTTP +winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="0"}' +winrm set winrm/config '@{MaxTimeoutms="7200000"}' +winrm set winrm/config/service '@{AllowUnencrypted="true"}' +winrm set winrm/config/service '@{MaxConcurrentOperationsPerUser="12000"}' +winrm set winrm/config/service/auth '@{Basic="true"}' +winrm set winrm/config/client/auth '@{Basic="true"}' + +# Configure UAC to allow privilege elevation in remote shells +$Key = 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System' +$Setting = 'LocalAccountTokenFilterPolicy' +Set-ItemProperty -Path $Key -Name $Setting -Value 1 -Force + +# Configure and restart the WinRM Service; Enable the required firewall exception +Stop-Service -Name WinRM +Set-Service -Name WinRM -StartupType Automatic +netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new action=allow localip=any remoteip=any +Start-Service -Name WinRM + +# Disable firewall on private network interfaces so prompts don't appear. +Set-NetFirewallProfile -Name private -Enabled false + +'''.lstrip() + + +WINDOWS_BOOTSTRAP_POWERSHELL = ''' +Write-Output "installing PowerShell dependencies" +Install-PackageProvider -Name NuGet -MinimumVersion 2.8.5.201 -Force +Set-PSRepository -Name PSGallery -InstallationPolicy Trusted +Install-Module -Name OpenSSHUtils -RequiredVersion 0.0.2.0 + +Write-Output "installing OpenSSL server" +Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 +# Various tools will attempt to use older versions of .NET. So we enable +# the feature that provides them so it doesn't have to be auto-enabled +# later. +Write-Output "enabling .NET Framework feature" +Install-WindowsFeature -Name Net-Framework-Core +''' + + +class AWSConnection: + """Manages the state of a connection with AWS.""" + + def __init__(self, automation, region: str): + self.automation = automation + self.local_state_path = automation.state_path + + self.prefix = 'hg-' + + self.session = boto3.session.Session(region_name=region) + self.ec2client = self.session.client('ec2') + self.ec2resource = self.session.resource('ec2') + self.iamclient = self.session.client('iam') + self.iamresource = self.session.resource('iam') + + ensure_key_pairs(automation.state_path, self.ec2resource) + + self.security_groups = ensure_security_groups(self.ec2resource) + ensure_iam_state(self.iamresource) + + def key_pair_path_private(self, name): + """Path to a key pair private key file.""" + return self.local_state_path / 'keys' / ('keypair-%s' % name) + + def key_pair_path_public(self, name): + return self.local_state_path / 'keys' / ('keypair-%s.pub' % name) + + +def rsa_key_fingerprint(p: pathlib.Path): + """Compute the fingerprint of an RSA private key.""" + + # TODO use rsa package. 
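+    # For key pairs generated by EC2, AWS reports the fingerprint as the
+    # SHA-1 digest of the private key in PKCS#8 DER form, written as
+    # colon-separated hex pairs. Convert our locally stored PEM key the
+    # same way so local and remote fingerprints can be compared.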
+ res = subprocess.run( + ['openssl', 'pkcs8', '-in', str(p), '-nocrypt', '-topk8', + '-outform', 'DER'], + capture_output=True, + check=True) + + sha1 = hashlib.sha1(res.stdout).hexdigest() + return ':'.join(a + b for a, b in zip(sha1[::2], sha1[1::2])) + + +def ensure_key_pairs(state_path: pathlib.Path, ec2resource, prefix='hg-'): + remote_existing = {} + + for kpi in ec2resource.key_pairs.all(): + if kpi.name.startswith(prefix): + remote_existing[kpi.name[len(prefix):]] = kpi.key_fingerprint + + # Validate that we have these keys locally. + key_path = state_path / 'keys' + key_path.mkdir(exist_ok=True, mode=0o700) + + def remove_remote(name): + print('deleting key pair %s' % name) + key = ec2resource.KeyPair(name) + key.delete() + + def remove_local(name): + pub_full = key_path / ('keypair-%s.pub' % name) + priv_full = key_path / ('keypair-%s' % name) + + print('removing %s' % pub_full) + pub_full.unlink() + print('removing %s' % priv_full) + priv_full.unlink() + + local_existing = {} + + for f in sorted(os.listdir(key_path)): + if not f.startswith('keypair-') or not f.endswith('.pub'): + continue + + name = f[len('keypair-'):-len('.pub')] + + pub_full = key_path / f + priv_full = key_path / ('keypair-%s' % name) + + with open(pub_full, 'r', encoding='ascii') as fh: + data = fh.read() + + if not data.startswith('ssh-rsa '): + print('unexpected format for key pair file: %s; removing' % + pub_full) + pub_full.unlink() + priv_full.unlink() + continue + + local_existing[name] = rsa_key_fingerprint(priv_full) + + for name in sorted(set(remote_existing) | set(local_existing)): + if name not in local_existing: + actual = '%s%s' % (prefix, name) + print('remote key %s does not exist locally' % name) + remove_remote(actual) + del remote_existing[name] + + elif name not in remote_existing: + print('local key %s does not exist remotely' % name) + remove_local(name) + del local_existing[name] + + elif remote_existing[name] != local_existing[name]: + print('key fingerprint mismatch for %s; ' + 'removing from local and remote' % name) + remove_local(name) + remove_remote('%s%s' % (prefix, name)) + del local_existing[name] + del remote_existing[name] + + missing = KEY_PAIRS - set(remote_existing) + + for name in sorted(missing): + actual = '%s%s' % (prefix, name) + print('creating key pair %s' % actual) + + priv_full = key_path / ('keypair-%s' % name) + pub_full = key_path / ('keypair-%s.pub' % name) + + kp = ec2resource.create_key_pair(KeyName=actual) + + with priv_full.open('w', encoding='ascii') as fh: + fh.write(kp.key_material) + fh.write('\n') + + priv_full.chmod(0o0600) + + # SSH public key can be extracted via `ssh-keygen`. 
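+        # `ssh-keygen -y -f <private-key>` re-derives the public key from
+        # the private key we just wrote; storing it alongside the private
+        # key lets later runs enumerate and validate local key pairs.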
+ with pub_full.open('w', encoding='ascii') as fh: + subprocess.run( + ['ssh-keygen', '-y', '-f', str(priv_full)], + stdout=fh, + check=True) + + pub_full.chmod(0o0600) + + +def delete_instance_profile(profile): + for role in profile.roles: + print('removing role %s from instance profile %s' % (role.name, + profile.name)) + profile.remove_role(RoleName=role.name) + + print('deleting instance profile %s' % profile.name) + profile.delete() + + +def ensure_iam_state(iamresource, prefix='hg-'): + """Ensure IAM state is in sync with our canonical definition.""" + + remote_profiles = {} + + for profile in iamresource.instance_profiles.all(): + if profile.name.startswith(prefix): + remote_profiles[profile.name[len(prefix):]] = profile + + for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)): + delete_instance_profile(remote_profiles[name]) + del remote_profiles[name] + + remote_roles = {} + + for role in iamresource.roles.all(): + if role.name.startswith(prefix): + remote_roles[role.name[len(prefix):]] = role + + for name in sorted(set(remote_roles) - set(IAM_ROLES)): + role = remote_roles[name] + + print('removing role %s' % role.name) + role.delete() + del remote_roles[name] + + # We've purged remote state that doesn't belong. Create missing + # instance profiles and roles. + for name in sorted(set(IAM_INSTANCE_PROFILES) - set(remote_profiles)): + actual = '%s%s' % (prefix, name) + print('creating IAM instance profile %s' % actual) + + profile = iamresource.create_instance_profile( + InstanceProfileName=actual) + remote_profiles[name] = profile + + for name in sorted(set(IAM_ROLES) - set(remote_roles)): + entry = IAM_ROLES[name] + + actual = '%s%s' % (prefix, name) + print('creating IAM role %s' % actual) + + role = iamresource.create_role( + RoleName=actual, + Description=entry['description'], + AssumeRolePolicyDocument=ASSUME_ROLE_POLICY_DOCUMENT, + ) + + remote_roles[name] = role + + for arn in entry['policy_arns']: + print('attaching policy %s to %s' % (arn, role.name)) + role.attach_policy(PolicyArn=arn) + + # Now reconcile state of profiles. + for name, meta in sorted(IAM_INSTANCE_PROFILES.items()): + profile = remote_profiles[name] + wanted = {'%s%s' % (prefix, role) for role in meta['roles']} + have = {role.name for role in profile.roles} + + for role in sorted(have - wanted): + print('removing role %s from %s' % (role, profile.name)) + profile.remove_role(RoleName=role) + + for role in sorted(wanted - have): + print('adding role %s to %s' % (role, profile.name)) + profile.add_role(RoleName=role) + + +def find_windows_server_2019_image(ec2resource): + """Find the Amazon published Windows Server 2019 base image.""" + + images = ec2resource.images.filter( + Filters=[ + { + 'Name': 'owner-alias', + 'Values': ['amazon'], + }, + { + 'Name': 'state', + 'Values': ['available'], + }, + { + 'Name': 'image-type', + 'Values': ['machine'], + }, + { + 'Name': 'name', + 'Values': ['Windows_Server-2019-English-Full-Base-2019.02.13'], + }, + ]) + + for image in images: + return image + + raise Exception('unable to find Windows Server 2019 image') + + +def ensure_security_groups(ec2resource, prefix='hg-'): + """Ensure all necessary Mercurial security groups are present. + + All security groups are prefixed with ``hg-`` by default. Any security + groups having this prefix but aren't in our list are deleted. 
+ """ + existing = {} + + for group in ec2resource.security_groups.all(): + if group.group_name.startswith(prefix): + existing[group.group_name[len(prefix):]] = group + + purge = set(existing) - set(SECURITY_GROUPS) + + for name in sorted(purge): + group = existing[name] + print('removing legacy security group: %s' % group.group_name) + group.delete() + + security_groups = {} + + for name, group in sorted(SECURITY_GROUPS.items()): + if name in existing: + security_groups[name] = existing[name] + continue + + actual = '%s%s' % (prefix, name) + print('adding security group %s' % actual) + + group_res = ec2resource.create_security_group( + Description=group['description'], + GroupName=actual, + ) + + group_res.authorize_ingress( + IpPermissions=group['ingress'], + ) + + security_groups[name] = group_res + + return security_groups + + +def terminate_ec2_instances(ec2resource, prefix='hg-'): + """Terminate all EC2 instances managed by us.""" + waiting = [] + + for instance in ec2resource.instances.all(): + if instance.state['Name'] == 'terminated': + continue + + for tag in instance.tags or []: + if tag['Key'] == 'Name' and tag['Value'].startswith(prefix): + print('terminating %s' % instance.id) + instance.terminate() + waiting.append(instance) + + for instance in waiting: + instance.wait_until_terminated() + + +def remove_resources(c, prefix='hg-'): + """Purge all of our resources in this EC2 region.""" + ec2resource = c.ec2resource + iamresource = c.iamresource + + terminate_ec2_instances(ec2resource, prefix=prefix) + + for image in ec2resource.images.all(): + if image.name.startswith(prefix): + remove_ami(ec2resource, image) + + for group in ec2resource.security_groups.all(): + if group.group_name.startswith(prefix): + print('removing security group %s' % group.group_name) + group.delete() + + for profile in iamresource.instance_profiles.all(): + if profile.name.startswith(prefix): + delete_instance_profile(profile) + + for role in iamresource.roles.all(): + if role.name.startswith(prefix): + print('removing role %s' % role.name) + role.delete() + + +def wait_for_ip_addresses(instances): + """Wait for the public IP addresses of an iterable of instances.""" + for instance in instances: + while True: + if not instance.public_ip_address: + time.sleep(2) + instance.reload() + continue + + print('public IP address for %s: %s' % ( + instance.id, instance.public_ip_address)) + break + + +def remove_ami(ec2resource, image): + """Remove an AMI and its underlying snapshots.""" + snapshots = [] + + for device in image.block_device_mappings: + if 'Ebs' in device: + snapshots.append(ec2resource.Snapshot(device['Ebs']['SnapshotId'])) + + print('deregistering %s' % image.id) + image.deregister() + + for snapshot in snapshots: + print('deleting snapshot %s' % snapshot.id) + snapshot.delete() + + +def wait_for_ssm(ssmclient, instances): + """Wait for SSM to come online for an iterable of instance IDs.""" + while True: + res = ssmclient.describe_instance_information( + Filters=[ + { + 'Key': 'InstanceIds', + 'Values': [i.id for i in instances], + }, + ], + ) + + available = len(res['InstanceInformationList']) + wanted = len(instances) + + print('%d/%d instances available in SSM' % (available, wanted)) + + if available == wanted: + return + + time.sleep(2) + + +def run_ssm_command(ssmclient, instances, document_name, parameters): + """Run a PowerShell script on an EC2 instance.""" + + res = ssmclient.send_command( + InstanceIds=[i.id for i in instances], + DocumentName=document_name, + Parameters=parameters, + 
CloudWatchOutputConfig={ + 'CloudWatchOutputEnabled': True, + }, + ) + + command_id = res['Command']['CommandId'] + + for instance in instances: + while True: + try: + res = ssmclient.get_command_invocation( + CommandId=command_id, + InstanceId=instance.id, + ) + except botocore.exceptions.ClientError as e: + if e.response['Error']['Code'] == 'InvocationDoesNotExist': + print('could not find SSM command invocation; waiting') + time.sleep(1) + continue + else: + raise + + if res['Status'] == 'Success': + break + elif res['Status'] in ('Pending', 'InProgress', 'Delayed'): + time.sleep(2) + else: + raise Exception('command failed on %s: %s' % ( + instance.id, res['Status'])) + + +@contextlib.contextmanager +def temporary_ec2_instances(ec2resource, config): + """Create temporary EC2 instances. + + This is a proxy to ``ec2client.run_instances(**config)`` that takes care of + managing the lifecycle of the instances. + + When the context manager exits, the instances are terminated. + + The context manager evaluates to the list of data structures + describing each created instance. The instances may not be available + for work immediately: it is up to the caller to wait for the instance + to start responding. + """ + + ids = None + + try: + res = ec2resource.create_instances(**config) + + ids = [i.id for i in res] + print('started instances: %s' % ' '.join(ids)) + + yield res + finally: + if ids: + print('terminating instances: %s' % ' '.join(ids)) + for instance in res: + instance.terminate() + print('terminated %d instances' % len(ids)) + + +@contextlib.contextmanager +def create_temp_windows_ec2_instances(c: AWSConnection, config): + """Create temporary Windows EC2 instances. + + This is a higher-level wrapper around ``create_temp_ec2_instances()`` that + configures the Windows instance for Windows Remote Management. The emitted + instances will have a ``winrm_client`` attribute containing a + ``pypsrp.client.Client`` instance bound to the instance. + """ + if 'IamInstanceProfile' in config: + raise ValueError('IamInstanceProfile cannot be provided in config') + if 'UserData' in config: + raise ValueError('UserData cannot be provided in config') + + password = c.automation.default_password() + + config = copy.deepcopy(config) + config['IamInstanceProfile'] = { + 'Name': 'hg-ephemeral-ec2-1', + } + config.setdefault('TagSpecifications', []).append({ + 'ResourceType': 'instance', + 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}], + }) + config['UserData'] = WINDOWS_USER_DATA % password + + with temporary_ec2_instances(c.ec2resource, config) as instances: + wait_for_ip_addresses(instances) + + print('waiting for Windows Remote Management service...') + + for instance in instances: + client = wait_for_winrm(instance.public_ip_address, 'Administrator', password) + print('established WinRM connection to %s' % instance.id) + instance.winrm_client = client + + yield instances + + +def ensure_windows_dev_ami(c: AWSConnection, prefix='hg-'): + """Ensure Windows Development AMI is available and up-to-date. + + If necessary, a modern AMI will be built by starting a temporary EC2 + instance and bootstrapping it. + + Obsolete AMIs will be deleted so there is only a single AMI having the + desired name. + + Returns an ``ec2.Image`` of either an existing AMI or a newly-built + one. 
+ """ + ec2client = c.ec2client + ec2resource = c.ec2resource + ssmclient = c.session.client('ssm') + + name = '%s%s' % (prefix, 'windows-dev') + + config = { + 'BlockDeviceMappings': [ + { + 'DeviceName': '/dev/sda1', + 'Ebs': { + 'DeleteOnTermination': True, + 'VolumeSize': 32, + 'VolumeType': 'gp2', + }, + } + ], + 'ImageId': find_windows_server_2019_image(ec2resource).id, + 'InstanceInitiatedShutdownBehavior': 'stop', + 'InstanceType': 't3.medium', + 'KeyName': '%sautomation' % prefix, + 'MaxCount': 1, + 'MinCount': 1, + 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], + } + + commands = [ + # Need to start the service so sshd_config is generated. + 'Start-Service sshd', + 'Write-Output "modifying sshd_config"', + r'$content = Get-Content C:\ProgramData\ssh\sshd_config', + '$content = $content -replace "Match Group administrators","" -replace "AuthorizedKeysFile __PROGRAMDATA__/ssh/administrators_authorized_keys",""', + r'$content | Set-Content C:\ProgramData\ssh\sshd_config', + 'Import-Module OpenSSHUtils', + r'Repair-SshdConfigPermission C:\ProgramData\ssh\sshd_config -Confirm:$false', + 'Restart-Service sshd', + 'Write-Output "installing OpenSSL client"', + 'Add-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0', + 'Set-Service -Name sshd -StartupType "Automatic"', + 'Write-Output "OpenSSH server running"', + ] + + with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh: + commands.extend(l.rstrip() for l in fh) + + # Disable Windows Defender when bootstrapping because it just slows + # things down. + commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true') + commands.append('Set-MpPreference -DisableRealtimeMonitoring $false') + + # Compute a deterministic fingerprint to determine whether image needs + # to be regenerated. + fingerprint = { + 'instance_config': config, + 'user_data': WINDOWS_USER_DATA, + 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL, + 'bootstrap_commands': commands, + } + + fingerprint = json.dumps(fingerprint, sort_keys=True) + fingerprint = hashlib.sha256(fingerprint.encode('utf-8')).hexdigest() + + # Find existing AMIs with this name and delete the ones that are invalid. + # Store a reference to a good image so it can be returned one the + # image state is reconciled. + images = ec2resource.images.filter( + Filters=[{'Name': 'name', 'Values': [name]}]) + + existing_image = None + + for image in images: + if image.tags is None: + print('image %s for %s lacks required tags; removing' % ( + image.id, image.name)) + remove_ami(ec2resource, image) + else: + tags = {t['Key']: t['Value'] for t in image.tags} + + if tags.get('HGIMAGEFINGERPRINT') == fingerprint: + existing_image = image + else: + print('image %s for %s has wrong fingerprint; removing' % ( + image.id, image.name)) + remove_ami(ec2resource, image) + + if existing_image: + return existing_image + + print('no suitable Windows development image found; creating one...') + + with create_temp_windows_ec2_instances(c, config) as instances: + assert len(instances) == 1 + instance = instances[0] + + wait_for_ssm(ssmclient, [instance]) + + # On first boot, install various Windows updates. + # We would ideally use PowerShell Remoting for this. However, there are + # trust issues that make it difficult to invoke Windows Update + # remotely. So we use SSM, which has a mechanism for running Windows + # Update. 
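+        # In practice this delivers the bootstrap PowerShell below (package
+        # providers, OpenSSH server, .NET) through the stock
+        # AWS-RunPowerShellScript document.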
+ print('installing Windows features...') + run_ssm_command( + ssmclient, + [instance], + 'AWS-RunPowerShellScript', + { + 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'), + }, + ) + + # Reboot so all updates are fully applied. + print('rebooting instance %s' % instance.id) + ec2client.reboot_instances(InstanceIds=[instance.id]) + + time.sleep(15) + + print('waiting for Windows Remote Management to come back...') + client = wait_for_winrm(instance.public_ip_address, 'Administrator', + c.automation.default_password()) + print('established WinRM connection to %s' % instance.id) + instance.winrm_client = client + + print('bootstrapping instance...') + run_powershell(instance.winrm_client, '\n'.join(commands)) + + print('bootstrap completed; stopping %s to create image' % instance.id) + instance.stop() + + ec2client.get_waiter('instance_stopped').wait( + InstanceIds=[instance.id], + WaiterConfig={ + 'Delay': 5, + }) + print('%s is stopped' % instance.id) + + image = instance.create_image( + Name=name, + Description='Mercurial Windows development environment', + ) + + image.create_tags(Tags=[ + { + 'Key': 'HGIMAGEFINGERPRINT', + 'Value': fingerprint, + }, + ]) + + print('waiting for image %s' % image.id) + + ec2client.get_waiter('image_available').wait( + ImageIds=[image.id], + ) + + print('image %s available as %s' % (image.id, image.name)) + + return image + + +@contextlib.contextmanager +def temporary_windows_dev_instances(c: AWSConnection, image, instance_type, + prefix='hg-', disable_antivirus=False): + """Create a temporary Windows development EC2 instance. + + Context manager resolves to the list of ``EC2.Instance`` that were created. + """ + config = { + 'BlockDeviceMappings': [ + { + 'DeviceName': '/dev/sda1', + 'Ebs': { + 'DeleteOnTermination': True, + 'VolumeSize': 32, + 'VolumeType': 'gp2', + }, + } + ], + 'ImageId': image.id, + 'InstanceInitiatedShutdownBehavior': 'stop', + 'InstanceType': instance_type, + 'KeyName': '%sautomation' % prefix, + 'MaxCount': 1, + 'MinCount': 1, + 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], + } + + with create_temp_windows_ec2_instances(c, config) as instances: + if disable_antivirus: + for instance in instances: + run_powershell( + instance.winrm_client, + 'Set-MpPreference -DisableRealtimeMonitoring $true') + + yield instances diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/hgautomation/cli.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/hgautomation/cli.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,273 @@ +# cli.py - Command line interface for automation +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import argparse +import os +import pathlib + +from . 
import ( + aws, + HGAutomation, + windows, +) + + +SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent +DIST_PATH = SOURCE_ROOT / 'dist' + + +def bootstrap_windows_dev(hga: HGAutomation, aws_region): + c = hga.aws_connection(aws_region) + image = aws.ensure_windows_dev_ami(c) + print('Windows development AMI available as %s' % image.id) + + +def build_inno(hga: HGAutomation, aws_region, arch, revision, version): + c = hga.aws_connection(aws_region) + image = aws.ensure_windows_dev_ami(c) + DIST_PATH.mkdir(exist_ok=True) + + with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts: + instance = insts[0] + + windows.synchronize_hg(SOURCE_ROOT, revision, instance) + + for a in arch: + windows.build_inno_installer(instance.winrm_client, a, + DIST_PATH, + version=version) + + +def build_wix(hga: HGAutomation, aws_region, arch, revision, version): + c = hga.aws_connection(aws_region) + image = aws.ensure_windows_dev_ami(c) + DIST_PATH.mkdir(exist_ok=True) + + with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts: + instance = insts[0] + + windows.synchronize_hg(SOURCE_ROOT, revision, instance) + + for a in arch: + windows.build_wix_installer(instance.winrm_client, a, + DIST_PATH, version=version) + + +def build_windows_wheel(hga: HGAutomation, aws_region, arch, revision): + c = hga.aws_connection(aws_region) + image = aws.ensure_windows_dev_ami(c) + DIST_PATH.mkdir(exist_ok=True) + + with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts: + instance = insts[0] + + windows.synchronize_hg(SOURCE_ROOT, revision, instance) + + for a in arch: + windows.build_wheel(instance.winrm_client, a, DIST_PATH) + + +def build_all_windows_packages(hga: HGAutomation, aws_region, revision): + c = hga.aws_connection(aws_region) + image = aws.ensure_windows_dev_ami(c) + DIST_PATH.mkdir(exist_ok=True) + + with aws.temporary_windows_dev_instances(c, image, 't3.medium') as insts: + instance = insts[0] + + winrm_client = instance.winrm_client + + windows.synchronize_hg(SOURCE_ROOT, revision, instance) + + for arch in ('x86', 'x64'): + windows.purge_hg(winrm_client) + windows.build_wheel(winrm_client, arch, DIST_PATH) + windows.purge_hg(winrm_client) + windows.build_inno_installer(winrm_client, arch, DIST_PATH) + windows.purge_hg(winrm_client) + windows.build_wix_installer(winrm_client, arch, DIST_PATH) + + +def terminate_ec2_instances(hga: HGAutomation, aws_region): + c = hga.aws_connection(aws_region) + aws.terminate_ec2_instances(c.ec2resource) + + +def purge_ec2_resources(hga: HGAutomation, aws_region): + c = hga.aws_connection(aws_region) + aws.remove_resources(c) + + +def run_tests_windows(hga: HGAutomation, aws_region, instance_type, + python_version, arch, test_flags): + c = hga.aws_connection(aws_region) + image = aws.ensure_windows_dev_ami(c) + + with aws.temporary_windows_dev_instances(c, image, instance_type, + disable_antivirus=True) as insts: + instance = insts[0] + + windows.synchronize_hg(SOURCE_ROOT, '.', instance) + windows.run_tests(instance.winrm_client, python_version, arch, + test_flags) + + +def get_parser(): + parser = argparse.ArgumentParser() + + parser.add_argument( + '--state-path', + default='~/.hgautomation', + help='Path for local state files', + ) + parser.add_argument( + '--aws-region', + help='AWS region to use', + default='us-west-1', + ) + + subparsers = parser.add_subparsers() + + sp = subparsers.add_parser( + 'bootstrap-windows-dev', + help='Bootstrap the Windows development environment', + ) + 
sp.set_defaults(func=bootstrap_windows_dev) + + sp = subparsers.add_parser( + 'build-all-windows-packages', + help='Build all Windows packages', + ) + sp.add_argument( + '--revision', + help='Mercurial revision to build', + default='.', + ) + sp.set_defaults(func=build_all_windows_packages) + + sp = subparsers.add_parser( + 'build-inno', + help='Build Inno Setup installer(s)', + ) + sp.add_argument( + '--arch', + help='Architecture to build for', + choices={'x86', 'x64'}, + nargs='*', + default=['x64'], + ) + sp.add_argument( + '--revision', + help='Mercurial revision to build', + default='.', + ) + sp.add_argument( + '--version', + help='Mercurial version string to use in installer', + ) + sp.set_defaults(func=build_inno) + + sp = subparsers.add_parser( + 'build-windows-wheel', + help='Build Windows wheel(s)', + ) + sp.add_argument( + '--arch', + help='Architecture to build for', + choices={'x86', 'x64'}, + nargs='*', + default=['x64'], + ) + sp.add_argument( + '--revision', + help='Mercurial revision to build', + default='.', + ) + sp.set_defaults(func=build_windows_wheel) + + sp = subparsers.add_parser( + 'build-wix', + help='Build WiX installer(s)' + ) + sp.add_argument( + '--arch', + help='Architecture to build for', + choices={'x86', 'x64'}, + nargs='*', + default=['x64'], + ) + sp.add_argument( + '--revision', + help='Mercurial revision to build', + default='.', + ) + sp.add_argument( + '--version', + help='Mercurial version string to use in installer', + ) + sp.set_defaults(func=build_wix) + + sp = subparsers.add_parser( + 'terminate-ec2-instances', + help='Terminate all active EC2 instances managed by us', + ) + sp.set_defaults(func=terminate_ec2_instances) + + sp = subparsers.add_parser( + 'purge-ec2-resources', + help='Purge all EC2 resources managed by us', + ) + sp.set_defaults(func=purge_ec2_resources) + + sp = subparsers.add_parser( + 'run-tests-windows', + help='Run tests on Windows', + ) + sp.add_argument( + '--instance-type', + help='EC2 instance type to use', + default='t3.medium', + ) + sp.add_argument( + '--python-version', + help='Python version to use', + choices={'2.7', '3.5', '3.6', '3.7', '3.8'}, + default='2.7', + ) + sp.add_argument( + '--arch', + help='Architecture to test', + choices={'x86', 'x64'}, + default='x64', + ) + sp.add_argument( + '--test-flags', + help='Extra command line flags to pass to run-tests.py', + ) + sp.set_defaults(func=run_tests_windows) + + return parser + + +def main(): + parser = get_parser() + args = parser.parse_args() + + local_state_path = pathlib.Path(os.path.expanduser(args.state_path)) + automation = HGAutomation(local_state_path) + + if not hasattr(args, 'func'): + parser.print_help() + return + + kwargs = dict(vars(args)) + del kwargs['func'] + del kwargs['state_path'] + + args.func(automation, **kwargs) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/hgautomation/windows.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/hgautomation/windows.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,287 @@ +# windows.py - Automation specific to Windows +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import os +import pathlib +import re +import subprocess +import tempfile + +from .winrm import ( + run_powershell, +) + + +# PowerShell commands to activate a Visual Studio 2008 environment. 
+# This is essentially a port of vcvarsall.bat to PowerShell. +ACTIVATE_VC9_AMD64 = r''' +Write-Output "activating Visual Studio 2008 environment for AMD64" +$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0" +$Env:VCINSTALLDIR = "${root}\VC\" +$Env:WindowsSdkDir = "${root}\WinSDK\" +$Env:PATH = "${root}\VC\Bin\amd64;${root}\WinSDK\Bin\x64;${root}\WinSDK\Bin;$Env:PATH" +$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:PATH" +$Env:LIB = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIB" +$Env:LIBPATH = "${root}\VC\Lib\amd64;${root}\WinSDK\Lib\x64;$Env:LIBPATH" +'''.lstrip() + +ACTIVATE_VC9_X86 = r''' +Write-Output "activating Visual Studio 2008 environment for x86" +$root = "$env:LOCALAPPDATA\Programs\Common\Microsoft\Visual C++ for Python\9.0" +$Env:VCINSTALLDIR = "${root}\VC\" +$Env:WindowsSdkDir = "${root}\WinSDK\" +$Env:PATH = "${root}\VC\Bin;${root}\WinSDK\Bin;$Env:PATH" +$Env:INCLUDE = "${root}\VC\Include;${root}\WinSDK\Include;$Env:INCLUDE" +$Env:LIB = "${root}\VC\Lib;${root}\WinSDK\Lib;$Env:LIB" +$Env:LIBPATH = "${root}\VC\lib;${root}\WinSDK\Lib:$Env:LIBPATH" +'''.lstrip() + +HG_PURGE = r''' +$Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH" +Set-Location C:\hgdev\src +hg.exe --config extensions.purge= purge --all +if ($LASTEXITCODE -ne 0) { + throw "process exited non-0: $LASTEXITCODE" +} +Write-Output "purged Mercurial repo" +''' + +HG_UPDATE_CLEAN = r''' +$Env:PATH = "C:\hgdev\venv-bootstrap\Scripts;$Env:PATH" +Set-Location C:\hgdev\src +hg.exe --config extensions.purge= purge --all +if ($LASTEXITCODE -ne 0) {{ + throw "process exited non-0: $LASTEXITCODE" +}} +hg.exe update -C {revision} +if ($LASTEXITCODE -ne 0) {{ + throw "process exited non-0: $LASTEXITCODE" +}} +hg.exe log -r . +Write-Output "updated Mercurial working directory to {revision}" +'''.lstrip() + +BUILD_INNO = r''' +Set-Location C:\hgdev\src +$python = "C:\hgdev\python27-{arch}\python.exe" +C:\hgdev\python37-x64\python.exe contrib\packaging\inno\build.py --python $python +if ($LASTEXITCODE -ne 0) {{ + throw "process exited non-0: $LASTEXITCODE" +}} +'''.lstrip() + +BUILD_WHEEL = r''' +Set-Location C:\hgdev\src +C:\hgdev\python27-{arch}\Scripts\pip.exe wheel --wheel-dir dist . 
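+# pip builds a wheel from the local Mercurial checkout into dist\ using the
+# architecture-specific Python 2.7 environment installed under C:\hgdev.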
+if ($LASTEXITCODE -ne 0) {{ + throw "process exited non-0: $LASTEXITCODE" +}} +''' + +BUILD_WIX = r''' +Set-Location C:\hgdev\src +$python = "C:\hgdev\python27-{arch}\python.exe" +C:\hgdev\python37-x64\python.exe contrib\packaging\wix\build.py --python $python {extra_args} +if ($LASTEXITCODE -ne 0) {{ + throw "process exited non-0: $LASTEXITCODE" +}} +''' + +RUN_TESTS = r''' +C:\hgdev\MinGW\msys\1.0\bin\sh.exe --login -c "cd /c/hgdev/src/tests && /c/hgdev/{python_path}/python.exe run-tests.py {test_flags}" +if ($LASTEXITCODE -ne 0) {{ + throw "process exited non-0: $LASTEXITCODE" +}} +''' + + +def get_vc_prefix(arch): + if arch == 'x86': + return ACTIVATE_VC9_X86 + elif arch == 'x64': + return ACTIVATE_VC9_AMD64 + else: + raise ValueError('illegal arch: %s; must be x86 or x64' % arch) + + +def fix_authorized_keys_permissions(winrm_client, path): + commands = [ + '$ErrorActionPreference = "Stop"', + 'Repair-AuthorizedKeyPermission -FilePath %s -Confirm:$false' % path, + r'icacls %s /remove:g "NT Service\sshd"' % path, + ] + + run_powershell(winrm_client, '\n'.join(commands)) + + +def synchronize_hg(hg_repo: pathlib.Path, revision: str, ec2_instance): + """Synchronize local Mercurial repo to remote EC2 instance.""" + + winrm_client = ec2_instance.winrm_client + + with tempfile.TemporaryDirectory() as temp_dir: + temp_dir = pathlib.Path(temp_dir) + + ssh_dir = temp_dir / '.ssh' + ssh_dir.mkdir() + ssh_dir.chmod(0o0700) + + # Generate SSH key to use for communication. + subprocess.run([ + 'ssh-keygen', '-t', 'rsa', '-b', '4096', '-N', '', + '-f', str(ssh_dir / 'id_rsa')], + check=True, capture_output=True) + + # Add it to ~/.ssh/authorized_keys on remote. + # This assumes the file doesn't already exist. + authorized_keys = r'c:\Users\Administrator\.ssh\authorized_keys' + winrm_client.execute_cmd(r'mkdir c:\Users\Administrator\.ssh') + winrm_client.copy(str(ssh_dir / 'id_rsa.pub'), authorized_keys) + fix_authorized_keys_permissions(winrm_client, authorized_keys) + + public_ip = ec2_instance.public_ip_address + + ssh_config = temp_dir / '.ssh' / 'config' + + with open(ssh_config, 'w', encoding='utf-8') as fh: + fh.write('Host %s\n' % public_ip) + fh.write(' User Administrator\n') + fh.write(' StrictHostKeyChecking no\n') + fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts')) + fh.write(' IdentityFile %s\n' % (ssh_dir / 'id_rsa')) + + env = dict(os.environ) + env['HGPLAIN'] = '1' + env['HGENCODING'] = 'utf-8' + + hg_bin = hg_repo / 'hg' + + res = subprocess.run( + ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'], + cwd=str(hg_repo), env=env, check=True, capture_output=True) + + full_revision = res.stdout.decode('ascii') + + args = [ + 'python2.7', hg_bin, + '--config', 'ui.ssh=ssh -F %s' % ssh_config, + '--config', 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe', + 'push', '-r', full_revision, 'ssh://%s/c:/hgdev/src' % public_ip, + ] + + subprocess.run(args, cwd=str(hg_repo), env=env, check=True) + + run_powershell(winrm_client, + HG_UPDATE_CLEAN.format(revision=full_revision)) + + # TODO detect dirty local working directory and synchronize accordingly. 
+ + +def purge_hg(winrm_client): + """Purge the Mercurial source repository on an EC2 instance.""" + run_powershell(winrm_client, HG_PURGE) + + +def find_latest_dist(winrm_client, pattern): + """Find path to newest file in dist/ directory matching a pattern.""" + + res = winrm_client.execute_ps( + r'$v = Get-ChildItem -Path C:\hgdev\src\dist -Filter "%s" ' + '| Sort-Object LastWriteTime -Descending ' + '| Select-Object -First 1\n' + '$v.name' % pattern + ) + return res[0] + + +def copy_latest_dist(winrm_client, pattern, dest_path): + """Copy latest file matching pattern in dist/ directory. + + Given a WinRM client and a file pattern, find the latest file on the remote + matching that pattern and copy it to the ``dest_path`` directory on the + local machine. + """ + latest = find_latest_dist(winrm_client, pattern) + source = r'C:\hgdev\src\dist\%s' % latest + dest = dest_path / latest + print('copying %s to %s' % (source, dest)) + winrm_client.fetch(source, str(dest)) + + +def build_inno_installer(winrm_client, arch: str, dest_path: pathlib.Path, + version=None): + """Build the Inno Setup installer on a remote machine. + + Using a WinRM client, remote commands are executed to build + a Mercurial Inno Setup installer. + """ + print('building Inno Setup installer for %s' % arch) + + extra_args = [] + if version: + extra_args.extend(['--version', version]) + + ps = get_vc_prefix(arch) + BUILD_INNO.format(arch=arch, + extra_args=' '.join(extra_args)) + run_powershell(winrm_client, ps) + copy_latest_dist(winrm_client, '*.exe', dest_path) + + +def build_wheel(winrm_client, arch: str, dest_path: pathlib.Path): + """Build Python wheels on a remote machine. + + Using a WinRM client, remote commands are executed to build a Python wheel + for Mercurial. + """ + print('Building Windows wheel for %s' % arch) + ps = get_vc_prefix(arch) + BUILD_WHEEL.format(arch=arch) + run_powershell(winrm_client, ps) + copy_latest_dist(winrm_client, '*.whl', dest_path) + + +def build_wix_installer(winrm_client, arch: str, dest_path: pathlib.Path, + version=None): + """Build the WiX installer on a remote machine. + + Using a WinRM client, remote commands are executed to build a WiX installer. + """ + print('Building WiX installer for %s' % arch) + extra_args = [] + if version: + extra_args.extend(['--version', version]) + + ps = get_vc_prefix(arch) + BUILD_WIX.format(arch=arch, + extra_args=' '.join(extra_args)) + run_powershell(winrm_client, ps) + copy_latest_dist(winrm_client, '*.msi', dest_path) + + +def run_tests(winrm_client, python_version, arch, test_flags=''): + """Run tests on a remote Windows machine. + + ``python_version`` is a ``X.Y`` string like ``2.7`` or ``3.7``. + ``arch`` is ``x86`` or ``x64``. + ``test_flags`` is a str representing extra arguments to pass to + ``run-tests.py``. 
+ """ + if not re.match(r'\d\.\d', python_version): + raise ValueError(r'python_version must be \d.\d; got %s' % + python_version) + + if arch not in ('x86', 'x64'): + raise ValueError('arch must be x86 or x64; got %s' % arch) + + python_path = 'python%s-%s' % (python_version.replace('.', ''), arch) + + ps = RUN_TESTS.format( + python_path=python_path, + test_flags=test_flags or '', + ) + + run_powershell(winrm_client, ps) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/hgautomation/winrm.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/hgautomation/winrm.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,82 @@ +# winrm.py - Interact with Windows Remote Management (WinRM) +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import logging +import pprint +import time + +from pypsrp.client import ( + Client, +) +from pypsrp.powershell import ( + PowerShell, + PSInvocationState, + RunspacePool, +) +import requests.exceptions + + +logger = logging.getLogger(__name__) + + +def wait_for_winrm(host, username, password, timeout=120, ssl=False): + """Wait for the Windows Remoting (WinRM) service to become available. + + Returns a ``psrpclient.Client`` instance. + """ + + end_time = time.time() + timeout + + while True: + try: + client = Client(host, username=username, password=password, + ssl=ssl, connection_timeout=5) + client.execute_cmd('echo "hello world"') + return client + except requests.exceptions.ConnectionError: + if time.time() >= end_time: + raise + + time.sleep(1) + + +def format_object(o): + if isinstance(o, str): + return o + + try: + o = str(o) + except TypeError: + o = pprint.pformat(o.extended_properties) + + return o + + +def run_powershell(client, script): + with RunspacePool(client.wsman) as pool: + ps = PowerShell(pool) + ps.add_script(script) + + ps.begin_invoke() + + while ps.state == PSInvocationState.RUNNING: + ps.poll_invoke() + for o in ps.output: + print(format_object(o)) + + ps.output[:] = [] + + ps.end_invoke() + + for o in ps.output: + print(format_object(o)) + + if ps.state == PSInvocationState.FAILED: + raise Exception('PowerShell execution failed: %s' % + ' '.join(map(format_object, ps.streams.error))) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/requirements.txt Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,119 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile -U --generate-hashes --output-file contrib/automation/requirements.txt contrib/automation/requirements.txt.in +# +asn1crypto==0.24.0 \ + --hash=sha256:2f1adbb7546ed199e3c90ef23ec95c5cf3585bac7d11fb7eb562a3fe89c64e87 \ + --hash=sha256:9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49 \ + # via cryptography +boto3==1.9.111 \ + --hash=sha256:06414c75d1f62af7d04fd652b38d1e4fd3cfd6b35bad978466af88e2aaecd00d \ + --hash=sha256:f3b77dff382374773d02411fa47ee408f4f503aeebd837fd9dc9ed8635bc5e8e +botocore==1.12.111 \ + --hash=sha256:6af473c52d5e3e7ff82de5334e9fee96b2d5ec2df5d78bc00cd9937e2573a7a8 \ + --hash=sha256:9f5123c7be704b17aeacae99b5842ab17bda1f799dd29134de8c70e0a50a45d7 \ + # via boto3, s3transfer +certifi==2019.3.9 \ + --hash=sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5 \ + 
--hash=sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae \ + # via requests +cffi==1.12.2 \ + --hash=sha256:00b97afa72c233495560a0793cdc86c2571721b4271c0667addc83c417f3d90f \ + --hash=sha256:0ba1b0c90f2124459f6966a10c03794082a2f3985cd699d7d63c4a8dae113e11 \ + --hash=sha256:0bffb69da295a4fc3349f2ec7cbe16b8ba057b0a593a92cbe8396e535244ee9d \ + --hash=sha256:21469a2b1082088d11ccd79dd84157ba42d940064abbfa59cf5f024c19cf4891 \ + --hash=sha256:2e4812f7fa984bf1ab253a40f1f4391b604f7fc424a3e21f7de542a7f8f7aedf \ + --hash=sha256:2eac2cdd07b9049dd4e68449b90d3ef1adc7c759463af5beb53a84f1db62e36c \ + --hash=sha256:2f9089979d7456c74d21303c7851f158833d48fb265876923edcb2d0194104ed \ + --hash=sha256:3dd13feff00bddb0bd2d650cdb7338f815c1789a91a6f68fdc00e5c5ed40329b \ + --hash=sha256:4065c32b52f4b142f417af6f33a5024edc1336aa845b9d5a8d86071f6fcaac5a \ + --hash=sha256:51a4ba1256e9003a3acf508e3b4f4661bebd015b8180cc31849da222426ef585 \ + --hash=sha256:59888faac06403767c0cf8cfb3f4a777b2939b1fbd9f729299b5384f097f05ea \ + --hash=sha256:59c87886640574d8b14910840327f5cd15954e26ed0bbd4e7cef95fa5aef218f \ + --hash=sha256:610fc7d6db6c56a244c2701575f6851461753c60f73f2de89c79bbf1cc807f33 \ + --hash=sha256:70aeadeecb281ea901bf4230c6222af0248c41044d6f57401a614ea59d96d145 \ + --hash=sha256:71e1296d5e66c59cd2c0f2d72dc476d42afe02aeddc833d8e05630a0551dad7a \ + --hash=sha256:8fc7a49b440ea752cfdf1d51a586fd08d395ff7a5d555dc69e84b1939f7ddee3 \ + --hash=sha256:9b5c2afd2d6e3771d516045a6cfa11a8da9a60e3d128746a7fe9ab36dfe7221f \ + --hash=sha256:9c759051ebcb244d9d55ee791259ddd158188d15adee3c152502d3b69005e6bd \ + --hash=sha256:b4d1011fec5ec12aa7cc10c05a2f2f12dfa0adfe958e56ae38dc140614035804 \ + --hash=sha256:b4f1d6332339ecc61275bebd1f7b674098a66fea11a00c84d1c58851e618dc0d \ + --hash=sha256:c030cda3dc8e62b814831faa4eb93dd9a46498af8cd1d5c178c2de856972fd92 \ + --hash=sha256:c2e1f2012e56d61390c0e668c20c4fb0ae667c44d6f6a2eeea5d7148dcd3df9f \ + --hash=sha256:c37c77d6562074452120fc6c02ad86ec928f5710fbc435a181d69334b4de1d84 \ + --hash=sha256:c8149780c60f8fd02752d0429246088c6c04e234b895c4a42e1ea9b4de8d27fb \ + --hash=sha256:cbeeef1dc3c4299bd746b774f019de9e4672f7cc666c777cd5b409f0b746dac7 \ + --hash=sha256:e113878a446c6228669144ae8a56e268c91b7f1fafae927adc4879d9849e0ea7 \ + --hash=sha256:e21162bf941b85c0cda08224dade5def9360f53b09f9f259adb85fc7dd0e7b35 \ + --hash=sha256:fb6934ef4744becbda3143d30c6604718871495a5e36c408431bf33d9c146889 \ + # via cryptography +chardet==3.0.4 \ + --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ + --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ + # via requests +cryptography==2.6.1 \ + --hash=sha256:066f815f1fe46020877c5983a7e747ae140f517f1b09030ec098503575265ce1 \ + --hash=sha256:210210d9df0afba9e000636e97810117dc55b7157c903a55716bb73e3ae07705 \ + --hash=sha256:26c821cbeb683facb966045e2064303029d572a87ee69ca5a1bf54bf55f93ca6 \ + --hash=sha256:2afb83308dc5c5255149ff7d3fb9964f7c9ee3d59b603ec18ccf5b0a8852e2b1 \ + --hash=sha256:2db34e5c45988f36f7a08a7ab2b69638994a8923853dec2d4af121f689c66dc8 \ + --hash=sha256:409c4653e0f719fa78febcb71ac417076ae5e20160aec7270c91d009837b9151 \ + --hash=sha256:45a4f4cf4f4e6a55c8128f8b76b4c057027b27d4c67e3fe157fa02f27e37830d \ + --hash=sha256:48eab46ef38faf1031e58dfcc9c3e71756a1108f4c9c966150b605d4a1a7f659 \ + --hash=sha256:6b9e0ae298ab20d371fc26e2129fd683cfc0cfde4d157c6341722de645146537 \ + --hash=sha256:6c4778afe50f413707f604828c1ad1ff81fadf6c110cb669579dea7e2e98a75e \ + 
--hash=sha256:8c33fb99025d353c9520141f8bc989c2134a1f76bac6369cea060812f5b5c2bb \ + --hash=sha256:9873a1760a274b620a135054b756f9f218fa61ca030e42df31b409f0fb738b6c \ + --hash=sha256:9b069768c627f3f5623b1cbd3248c5e7e92aec62f4c98827059eed7053138cc9 \ + --hash=sha256:9e4ce27a507e4886efbd3c32d120db5089b906979a4debf1d5939ec01b9dd6c5 \ + --hash=sha256:acb424eaca214cb08735f1a744eceb97d014de6530c1ea23beb86d9c6f13c2ad \ + --hash=sha256:c8181c7d77388fe26ab8418bb088b1a1ef5fde058c6926790c8a0a3d94075a4a \ + --hash=sha256:d4afbb0840f489b60f5a580a41a1b9c3622e08ecb5eec8614d4fb4cd914c4460 \ + --hash=sha256:d9ed28030797c00f4bc43c86bf819266c76a5ea61d006cd4078a93ebf7da6bfd \ + --hash=sha256:e603aa7bb52e4e8ed4119a58a03b60323918467ef209e6ff9db3ac382e5cf2c6 \ + # via pypsrp +docutils==0.14 \ + --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \ + --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \ + --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 \ + # via botocore +idna==2.8 \ + --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ + --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \ + # via requests +jmespath==0.9.4 \ + --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \ + --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \ + # via boto3, botocore +ntlm-auth==1.2.0 \ + --hash=sha256:7bc02a3fbdfee7275d3dc20fce8028ed8eb6d32364637f28be9e9ae9160c6d5c \ + --hash=sha256:9b13eaf88f16a831637d75236a93d60c0049536715aafbf8190ba58a590b023e \ + # via pypsrp +pycparser==2.19 \ + --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3 \ + # via cffi +pypsrp==0.3.1 \ + --hash=sha256:309853380fe086090a03cc6662a778ee69b1cae355ae4a932859034fd76e9d0b \ + --hash=sha256:90f946254f547dc3493cea8493c819ab87e152a755797c93aa2668678ba8ae85 +python-dateutil==2.8.0 \ + --hash=sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb \ + --hash=sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e \ + # via botocore +requests==2.21.0 \ + --hash=sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e \ + --hash=sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b \ + # via pypsrp +s3transfer==0.2.0 \ + --hash=sha256:7b9ad3213bff7d357f888e0fab5101b56fa1a0548ee77d121c3a3dbfbef4cb2e \ + --hash=sha256:f23d5cb7d862b104401d9021fc82e5fa0e0cf57b7660a1331425aab0c691d021 \ + # via boto3 +six==1.12.0 \ + --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ + --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ + # via cryptography, pypsrp, python-dateutil +urllib3==1.24.1 \ + --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ + --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \ + # via botocore, requests diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/automation/requirements.txt.in --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/automation/requirements.txt.in Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,2 @@ +boto3 +pypsrp diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/base-revsets.txt --- a/contrib/base-revsets.txt Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/base-revsets.txt Wed Apr 17 13:41:18 2019 -0400 @@ -47,3 +47,6 @@ # The one below is used by rebase (children(ancestor(tip~5, tip)) and ::(tip~5)):: heads(commonancestors(last(head(), 2))) 
+heads(-10000:-1) +roots(-10000:-1) +only(max(head()), min(head())) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/bdiff-torture.py --- a/contrib/bdiff-torture.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/bdiff-torture.py Wed Apr 17 13:41:18 2019 -0400 @@ -25,7 +25,7 @@ try: test1(a, b) - except Exception as inst: + except Exception: reductions += 1 tries = 0 a = a2 diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/check-code.py --- a/contrib/check-code.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-code.py Wed Apr 17 13:41:18 2019 -0400 @@ -40,6 +40,8 @@ except ImportError: re2 = None +import testparseutil + def compilere(pat, multiline=False): if multiline: pat = '(?m)' + pat @@ -231,8 +233,10 @@ (r"( +)(#([^!][^\n]*\S)?)", repcomment), ] -pypats = [ +# common patterns to check *.py +commonpypats = [ [ + (r'\\$', 'Use () to wrap long lines in Python, not \\'), (r'^\s*def\s*\w+\s*\(.*,\s*\(', "tuple parameter unpacking not available in Python 3+"), (r'lambda\s*\(.*,.*\)', @@ -261,7 +265,6 @@ # a pass at the same indent level, which is bogus r'(?P=indent)pass[ \t\n#]' ), 'omit superfluous pass'), - (r'.{81}', "line too long"), (r'[^\n]\Z', "no trailing newline"), (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), # (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=', @@ -299,7 +302,6 @@ "wrong whitespace around ="), (r'\([^()]*( =[^=]|[^<>!=]= )', "no whitespace around = for named parameters"), - (r'raise Exception', "don't raise generic exceptions"), (r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$', "don't use old-style two-argument raise, use Exception(message)"), (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"), @@ -315,21 +317,12 @@ "use opener.read() instead"), (r'opener\([^)]*\).write\(', "use opener.write() instead"), - (r'[\s\(](open|file)\([^)]*\)\.read\(', - "use util.readfile() instead"), - (r'[\s\(](open|file)\([^)]*\)\.write\(', - "use util.writefile() instead"), - (r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))', - "always assign an opened file to a variable, and close it afterwards"), - (r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))', - "always assign an opened file to a variable, and close it afterwards"), (r'(?i)descend[e]nt', "the proper spelling is descendAnt"), (r'\.debug\(\_', "don't mark debug messages for translation"), (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"), (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'), (r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,', 'legacy exception syntax; use "as" instead of ","'), - (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"), (r'release\(.*wlock, .*lock\)', "wrong lock release order"), (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"), (r'os\.path\.join\(.*, *(""|\'\')\)', @@ -339,7 +332,6 @@ (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"), (r'\butil\.Abort\b', "directly use error.Abort"), (r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"), - (r'^import atexit', "don't use atexit, use ui.atexit"), (r'^import Queue', "don't use Queue, use pycompat.queue.Queue + " "pycompat.queue.Empty"), (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"), @@ -358,6 +350,34 @@ "don't convert rev to node before passing to revision(nodeorrev)"), (r'platform\.system\(\)', "don't use platform.system(), use pycompat"), + ], + # warnings + [ + ] +] + +# patterns to check normal *.py files +pypats = [ + [ + # Ideally, these should be placed in "commonpypats" for + # consistency of coding rules in Mercurial source tree. 
+ # But on the other hand, these are not so seriously required for + # python code fragments embedded in test scripts. Fixing test + # scripts for these patterns requires many changes, and has less + # profit than effort. + (r'.{81}', "line too long"), + (r'raise Exception', "don't raise generic exceptions"), + (r'[\s\(](open|file)\([^)]*\)\.read\(', + "use util.readfile() instead"), + (r'[\s\(](open|file)\([^)]*\)\.write\(', + "use util.writefile() instead"), + (r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))', + "always assign an opened file to a variable, and close it afterwards"), + (r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))', + "always assign an opened file to a variable, and close it afterwards"), + (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"), + (r'^import atexit', "don't use atexit, use ui.atexit"), + # rules depending on implementation of repquote() (r' x+[xpqo%APM][\'"]\n\s+[\'"]x', 'string join across lines with no space'), @@ -376,21 +396,35 @@ # because _preparepats forcibly adds "\n" into [^...], # even though this regexp wants match it against "\n")''', "missing _() in ui message (use () to hide false-positives)"), - ], + ] + commonpypats[0], # warnings [ # rules depending on implementation of repquote() (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"), - ] + ] + commonpypats[1] ] -pyfilters = [ +# patterns to check *.py for embedded ones in test script +embeddedpypats = [ + [ + ] + commonpypats[0], + # warnings + [ + ] + commonpypats[1] +] + +# common filters to convert *.py +commonpyfilters = [ (r"""(?msx)(?P\#.*?$)| ((?P('''|\"\"\"|(?(([^\\]|\\.)*?)) (?P=quote))""", reppython), ] +# filters to convert normal *.py files +pyfilters = [ +] + commonpyfilters + # non-filter patterns pynfpats = [ [ @@ -403,6 +437,10 @@ [], ] +# filters to convert *.py for embedded ones in test script +embeddedpyfilters = [ +] + commonpyfilters + # extension non-filter patterns pyextnfpats = [ [(r'^"""\n?[A-Z]', "don't capitalize docstring title")], @@ -414,7 +452,7 @@ txtpats = [ [ - ('\s$', 'trailing whitespace'), + (r'\s$', 'trailing whitespace'), ('.. note::[ \n][^\n]', 'add two newlines after note::') ], [] @@ -537,9 +575,17 @@ allfilesfilters, allfilespats), ] +# (desc, +# func to pick up embedded code fragments, +# list of patterns to convert target files +# list of patterns to detect errors/warnings) +embeddedchecks = [ + ('embedded python', + testparseutil.pyembedded, embeddedpyfilters, embeddedpypats) +] + def _preparepats(): - for c in checks: - failandwarn = c[-1] + def preparefailandwarn(failandwarn): for pats in failandwarn: for i, pseq in enumerate(pats): # fix-up regexes for multi-line searches @@ -553,10 +599,19 @@ p = re.sub(r'(?= curmaxerr: + break + curmaxerr -= fc + + return result + +def _checkfiledata(name, f, filedata, filters, pats, context, + logfunc, maxerr, warnings, blame, debug, lineno, + offset=None): + """Execute actual error check for file data + + :name: of the checking category + :f: filepath + :filedata: content of a file + :filters: to be applied before checking + :pats: to detect errors + :context: a dict of information shared while single checkfile() invocation + Valid keys: 'blamecache'. 
+ :logfunc: function used to report error + logfunc(filename, linenumber, linecontent, errormessage) + :maxerr: number of error to display before aborting, or False to + report all errors + :warnings: whether warning level checks should be applied + :blame: whether blame information should be displayed at error reporting + :debug: whether debug information should be displayed + :lineno: whether lineno should be displayed at error reporting + :offset: line number offset of 'filedata' in 'f' for checking + an embedded code fragment, or None (offset=0 is different + from offset=None) + + returns number of detected errors. + """ + blamecache = context['blamecache'] + if offset is None: + lineoffset = 0 + else: + lineoffset = offset + + fc = 0 + pre = post = filedata + + if True: # TODO: get rid of this redundant 'if' block for p, r in filters: post = re.sub(p, r, post) nerrs = len(pats[0]) # nerr elements are errors @@ -679,20 +802,30 @@ if ignore and re.search(ignore, l, re.MULTILINE): if debug: print("Skipping %s for %s:%s (ignore pattern)" % ( - name, f, n)) + name, f, (n + lineoffset))) continue bd = "" if blame: bd = 'working directory' - if not blamecache: + if blamecache is None: blamecache = getblame(f) - if n < len(blamecache): - bl, bu, br = blamecache[n] - if bl == l: + context['blamecache'] = blamecache + if (n + lineoffset) < len(blamecache): + bl, bu, br = blamecache[(n + lineoffset)] + if offset is None and bl == l: bd = '%s@%s' % (bu, br) + elif offset is not None and bl.endswith(l): + # "offset is not None" means "checking + # embedded code fragment". In this case, + # "l" does not have information about the + # beginning of an *original* line in the + # file (e.g. ' > '). + # Therefore, use "str.endswith()", and + # show "maybe" for a little loose + # examination. + bd = '%s@%s, maybe' % (bu, br) - errors.append((f, lineno and n + 1, l, msg, bd)) - result = False + errors.append((f, lineno and (n + lineoffset + 1), l, msg, bd)) errors.sort() for e in errors: @@ -702,7 +835,7 @@ print(" (too many errors, giving up)") break - return result + return fc def main(): parser = optparse.OptionParser("%prog [options] [files | -]") diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/check-commit --- a/contrib/check-commit Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-commit Wed Apr 17 13:41:18 2019 -0400 @@ -47,7 +47,7 @@ "adds a function with foo_bar naming"), ] -word = re.compile('\S') +word = re.compile(r'\S') def nonempty(first, second): if word.search(first): return first diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/check-config.py --- a/contrib/check-config.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-config.py Wed Apr 17 13:41:18 2019 -0400 @@ -25,7 +25,7 @@ (?:default=)?(?P\S+?))? \)''', re.VERBOSE | re.MULTILINE) -configwithre = re.compile(b''' +configwithre = re.compile(br''' ui\.config(?Pwith)\( # First argument is callback function. This doesn't parse robustly # if it is e.g. a function call. 
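The ``check-code.py`` hunks above factor the Python rules into ``commonpypats`` and introduce ``embeddedchecks``/``_checkfiledata()`` so that Python fragments extracted from test scripts (via ``testparseutil.pyembedded``) can be checked with a line-number ``offset`` back into the enclosing file. A minimal sketch of that offset handling, not Mercurial's actual implementation::

    # Hypothetical, simplified analogue of _checkfiledata(): ``patterns`` are
    # (regex, message) pairs and ``offset`` is where the embedded fragment
    # begins inside the real file, so reported line numbers stay file-relative.
    import re

    patterns = [
        (r'.{81}', 'line too long'),
        (r'raise Exception', "don't raise generic exceptions"),
    ]

    def check_fragment(path, fragment, offset=0):
        errors = []
        for n, line in enumerate(fragment.splitlines()):
            for pat, msg in patterns:
                if re.search(pat, line):
                    errors.append((path, n + offset + 1, line, msg))
        return errors

    print(check_fragment('tests/test-demo.t', 'raise Exception("boom")', offset=41))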
@@ -61,10 +61,10 @@ linenum += 1 # check topic-like bits - m = re.match(b'\s*``(\S+)``', l) + m = re.match(br'\s*``(\S+)``', l) if m: prevname = m.group(1) - if re.match(b'^\s*-+$', l): + if re.match(br'^\s*-+$', l): sect = prevname prevname = b'' diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/check-py3-compat.py --- a/contrib/check-py3-compat.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/check-py3-compat.py Wed Apr 17 13:41:18 2019 -0400 @@ -14,6 +14,7 @@ import os import sys import traceback +import warnings def check_compat_py2(f): """Check Python 3 compatibility for a file with Python 2""" @@ -45,7 +46,7 @@ content = fh.read() try: - ast.parse(content) + ast.parse(content, filename=f) except SyntaxError as e: print('%s: invalid syntax: %s' % (f, e)) return @@ -91,6 +92,11 @@ fn = check_compat_py3 for f in sys.argv[1:]: - fn(f) + with warnings.catch_warnings(record=True) as warns: + fn(f) + + for w in warns: + print(warnings.formatwarning(w.message, w.category, + w.filename, w.lineno).rstrip()) sys.exit(0) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/chg/hgclient.c --- a/contrib/chg/hgclient.c Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/chg/hgclient.c Wed Apr 17 13:41:18 2019 -0400 @@ -84,8 +84,9 @@ static void enlargecontext(context_t *ctx, size_t newsize) { - if (newsize <= ctx->maxdatasize) + if (newsize <= ctx->maxdatasize) { return; + } newsize = defaultdatasize * ((newsize + defaultdatasize - 1) / defaultdatasize); @@ -117,22 +118,25 @@ uint32_t datasize_n; rsize = recv(hgc->sockfd, &datasize_n, sizeof(datasize_n), 0); - if (rsize != sizeof(datasize_n)) + if (rsize != sizeof(datasize_n)) { abortmsg("failed to read data size"); + } /* datasize denotes the maximum size to write if input request */ hgc->ctx.datasize = ntohl(datasize_n); enlargecontext(&hgc->ctx, hgc->ctx.datasize); - if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') + if (isupper(hgc->ctx.ch) && hgc->ctx.ch != 'S') { return; /* assumes input request */ + } size_t cursize = 0; while (cursize < hgc->ctx.datasize) { rsize = recv(hgc->sockfd, hgc->ctx.data + cursize, hgc->ctx.datasize - cursize, 0); - if (rsize < 1) + if (rsize < 1) { abortmsg("failed to read data block"); + } cursize += rsize; } } @@ -143,8 +147,9 @@ const char *const endp = p + datasize; while (p < endp) { ssize_t r = send(sockfd, p, endp - p, 0); - if (r < 0) + if (r < 0) { abortmsgerrno("cannot communicate"); + } p += r; } } @@ -186,8 +191,9 @@ ctx->datasize += n; } - if (ctx->datasize > 0) + if (ctx->datasize > 0) { --ctx->datasize; /* strip last '\0' */ + } } /* Extract '\0'-separated list of args to new buffer, terminated by NULL */ @@ -205,8 +211,9 @@ args[nargs] = s; nargs++; s = memchr(s, '\0', e - s); - if (!s) + if (!s) { break; + } s++; } args[nargs] = NULL; @@ -225,8 +232,9 @@ static void handlereadlinerequest(hgclient_t *hgc) { context_t *ctx = &hgc->ctx; - if (!fgets(ctx->data, ctx->datasize, stdin)) + if (!fgets(ctx->data, ctx->datasize, stdin)) { ctx->data[0] = '\0'; + } ctx->datasize = strlen(ctx->data); writeblock(hgc); } @@ -239,8 +247,9 @@ ctx->data[ctx->datasize] = '\0'; /* terminate last string */ const char **args = unpackcmdargsnul(ctx); - if (!args[0] || !args[1] || !args[2]) + if (!args[0] || !args[1] || !args[2]) { abortmsg("missing type or command or cwd in system request"); + } if (strcmp(args[0], "system") == 0) { debugmsg("run '%s' at '%s'", args[1], args[2]); int32_t r = runshellcmd(args[1], args + 3, args[2]); @@ -252,8 +261,9 @@ writeblock(hgc); } else if (strcmp(args[0], "pager") == 0) { setuppager(args[1], args + 3); 
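Above, ``check-py3-compat.py`` starts collecting warnings raised while each file is checked and prints them after the check instead of letting them escape unformatted. It relies on the standard-library ``warnings.catch_warnings(record=True)`` pattern; a self-contained sketch (the check function and message are made up for illustration)::

    # Illustration only: capture warnings raised by a check, then report them.
    import warnings

    def noisy_check(path):
        warnings.warn('%s: uses a deprecated API' % path, DeprecationWarning)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')   # make sure nothing is suppressed
        noisy_check('example.py')

    for w in caught:
        print(warnings.formatwarning(w.message, w.category,
                                     w.filename, w.lineno).rstrip())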
- if (hgc->capflags & CAP_ATTACHIO) + if (hgc->capflags & CAP_ATTACHIO) { attachio(hgc); + } /* unblock the server */ static const char emptycmd[] = "\n"; sendall(hgc->sockfd, emptycmd, sizeof(emptycmd) - 1); @@ -296,9 +306,10 @@ handlesystemrequest(hgc); break; default: - if (isupper(ctx->ch)) + if (isupper(ctx->ch)) { abortmsg("cannot handle response (ch = %c)", ctx->ch); + } } } } @@ -308,8 +319,9 @@ unsigned int flags = 0; while (s < e) { const char *t = strchr(s, ' '); - if (!t || t > e) + if (!t || t > e) { t = e; + } const cappair_t *cap; for (cap = captable; cap->flag; ++cap) { size_t n = t - s; @@ -346,11 +358,13 @@ const char *const dataend = ctx->data + ctx->datasize; while (s < dataend) { const char *t = strchr(s, ':'); - if (!t || t[1] != ' ') + if (!t || t[1] != ' ') { break; + } const char *u = strchr(t + 2, '\n'); - if (!u) + if (!u) { u = dataend; + } if (strncmp(s, "capabilities:", t - s + 1) == 0) { hgc->capflags = parsecapabilities(t + 2, u); } else if (strncmp(s, "pgid:", t - s + 1) == 0) { @@ -367,8 +381,9 @@ { int r = snprintf(hgc->ctx.data, hgc->ctx.maxdatasize, "chg[worker/%d]", (int)getpid()); - if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize) + if (r < 0 || (size_t)r >= hgc->ctx.maxdatasize) { abortmsg("insufficient buffer to write procname (r = %d)", r); + } hgc->ctx.datasize = (size_t)r; writeblockrequest(hgc, "setprocname"); } @@ -380,8 +395,9 @@ sendall(hgc->sockfd, chcmd, sizeof(chcmd) - 1); readchannel(hgc); context_t *ctx = &hgc->ctx; - if (ctx->ch != 'I') + if (ctx->ch != 'I') { abortmsg("unexpected response for attachio (ch = %c)", ctx->ch); + } static const int fds[3] = {STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO}; struct msghdr msgh; @@ -399,23 +415,27 @@ memcpy(CMSG_DATA(cmsg), fds, sizeof(fds)); msgh.msg_controllen = cmsg->cmsg_len; ssize_t r = sendmsg(hgc->sockfd, &msgh, 0); - if (r < 0) + if (r < 0) { abortmsgerrno("sendmsg failed"); + } handleresponse(hgc); int32_t n; - if (ctx->datasize != sizeof(n)) + if (ctx->datasize != sizeof(n)) { abortmsg("unexpected size of attachio result"); + } memcpy(&n, ctx->data, sizeof(n)); n = ntohl(n); - if (n != sizeof(fds) / sizeof(fds[0])) + if (n != sizeof(fds) / sizeof(fds[0])) { abortmsg("failed to send fds (n = %d)", n); + } } static void chdirtocwd(hgclient_t *hgc) { - if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) + if (!getcwd(hgc->ctx.data, hgc->ctx.maxdatasize)) { abortmsgerrno("failed to getcwd"); + } hgc->ctx.datasize = strlen(hgc->ctx.data); writeblockrequest(hgc, "chdir"); } @@ -440,8 +460,9 @@ hgclient_t *hgc_open(const char *sockname) { int fd = socket(AF_UNIX, SOCK_STREAM, 0); - if (fd < 0) + if (fd < 0) { abortmsgerrno("cannot create socket"); + } /* don't keep fd on fork(), so that it can be closed when the parent * process get terminated. 
*/ @@ -456,34 +477,39 @@ { const char *split = strrchr(sockname, '/'); if (split && split != sockname) { - if (split[1] == '\0') + if (split[1] == '\0') { abortmsg("sockname cannot end with a slash"); + } size_t len = split - sockname; char sockdir[len + 1]; memcpy(sockdir, sockname, len); sockdir[len] = '\0'; bakfd = open(".", O_DIRECTORY); - if (bakfd == -1) + if (bakfd == -1) { abortmsgerrno("cannot open cwd"); + } int r = chdir(sockdir); - if (r != 0) + if (r != 0) { abortmsgerrno("cannot chdir %s", sockdir); + } basename = split + 1; } } - if (strlen(basename) >= sizeof(addr.sun_path)) + if (strlen(basename) >= sizeof(addr.sun_path)) { abortmsg("sockname is too long: %s", basename); + } strncpy(addr.sun_path, basename, sizeof(addr.sun_path)); addr.sun_path[sizeof(addr.sun_path) - 1] = '\0'; /* real connect */ int r = connect(fd, (struct sockaddr *)&addr, sizeof(addr)); if (r < 0) { - if (errno != ENOENT && errno != ECONNREFUSED) + if (errno != ENOENT && errno != ECONNREFUSED) { abortmsgerrno("cannot connect to %s", sockname); + } } if (bakfd != -1) { fchdirx(bakfd); @@ -501,16 +527,21 @@ initcontext(&hgc->ctx); readhello(hgc); - if (!(hgc->capflags & CAP_RUNCOMMAND)) + if (!(hgc->capflags & CAP_RUNCOMMAND)) { abortmsg("insufficient capability: runcommand"); - if (hgc->capflags & CAP_SETPROCNAME) + } + if (hgc->capflags & CAP_SETPROCNAME) { updateprocname(hgc); - if (hgc->capflags & CAP_ATTACHIO) + } + if (hgc->capflags & CAP_ATTACHIO) { attachio(hgc); - if (hgc->capflags & CAP_CHDIR) + } + if (hgc->capflags & CAP_CHDIR) { chdirtocwd(hgc); - if (hgc->capflags & CAP_SETUMASK2) + } + if (hgc->capflags & CAP_SETUMASK2) { forwardumask(hgc); + } return hgc; } @@ -555,16 +586,18 @@ size_t argsize) { assert(hgc); - if (!(hgc->capflags & CAP_VALIDATE)) + if (!(hgc->capflags & CAP_VALIDATE)) { return NULL; + } packcmdargs(&hgc->ctx, args, argsize); writeblockrequest(hgc, "validate"); handleresponse(hgc); /* the server returns '\0' if it can handle our request */ - if (hgc->ctx.datasize <= 1) + if (hgc->ctx.datasize <= 1) { return NULL; + } /* make sure the buffer is '\0' terminated */ enlargecontext(&hgc->ctx, hgc->ctx.datasize + 1); @@ -599,8 +632,9 @@ void hgc_attachio(hgclient_t *hgc) { assert(hgc); - if (!(hgc->capflags & CAP_ATTACHIO)) + if (!(hgc->capflags & CAP_ATTACHIO)) { return; + } attachio(hgc); } @@ -613,8 +647,9 @@ void hgc_setenv(hgclient_t *hgc, const char *const envp[]) { assert(hgc && envp); - if (!(hgc->capflags & CAP_SETENV)) + if (!(hgc->capflags & CAP_SETENV)) { return; + } packcmdargs(&hgc->ctx, envp, /*argsize*/ -1); writeblockrequest(hgc, "setenv"); } diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/chg/procutil.c --- a/contrib/chg/procutil.c Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/chg/procutil.c Wed Apr 17 13:41:18 2019 -0400 @@ -25,8 +25,9 @@ static void forwardsignal(int sig) { assert(peerpid > 0); - if (kill(peerpid, sig) < 0) + if (kill(peerpid, sig) < 0) { abortmsgerrno("cannot kill %d", peerpid); + } debugmsg("forward signal %d", sig); } @@ -34,8 +35,9 @@ { /* prefer kill(-pgid, sig), fallback to pid if pgid is invalid */ pid_t killpid = peerpgid > 1 ? 
-peerpgid : peerpid; - if (kill(killpid, sig) < 0) + if (kill(killpid, sig) < 0) { abortmsgerrno("cannot kill %d", killpid); + } debugmsg("forward signal %d to %d", sig, killpid); } @@ -43,28 +45,36 @@ { sigset_t unblockset, oldset; struct sigaction sa, oldsa; - if (sigemptyset(&unblockset) < 0) + if (sigemptyset(&unblockset) < 0) { goto error; - if (sigaddset(&unblockset, sig) < 0) + } + if (sigaddset(&unblockset, sig) < 0) { goto error; + } memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { goto error; + } forwardsignal(sig); - if (raise(sig) < 0) /* resend to self */ + if (raise(sig) < 0) { /* resend to self */ goto error; - if (sigaction(sig, &sa, &oldsa) < 0) + } + if (sigaction(sig, &sa, &oldsa) < 0) { goto error; - if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) + } + if (sigprocmask(SIG_UNBLOCK, &unblockset, &oldset) < 0) { goto error; + } /* resent signal will be handled before sigprocmask() returns */ - if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) + if (sigprocmask(SIG_SETMASK, &oldset, NULL) < 0) { goto error; - if (sigaction(sig, &oldsa, NULL) < 0) + } + if (sigaction(sig, &oldsa, NULL) < 0) { goto error; + } return; error: @@ -73,19 +83,22 @@ static void handlechildsignal(int sig UNUSED_) { - if (peerpid == 0 || pagerpid == 0) + if (peerpid == 0 || pagerpid == 0) { return; + } /* if pager exits, notify the server with SIGPIPE immediately. * otherwise the server won't get SIGPIPE if it does not write * anything. (issue5278) */ - if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid) + if (waitpid(pagerpid, NULL, WNOHANG) == pagerpid) { kill(peerpid, SIGPIPE); + } } void setupsignalhandler(pid_t pid, pid_t pgid) { - if (pid <= 0) + if (pid <= 0) { return; + } peerpid = pid; peerpgid = (pgid <= 1 ? 
0 : pgid); @@ -98,42 +111,52 @@ * - SIGINT: usually generated by the terminal */ sa.sa_handler = forwardsignaltogroup; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { + goto error; + } + if (sigaction(SIGHUP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGHUP, &sa, NULL) < 0) + } + if (sigaction(SIGINT, &sa, NULL) < 0) { goto error; - if (sigaction(SIGINT, &sa, NULL) < 0) - goto error; + } /* terminate frontend by double SIGTERM in case of server freeze */ sa.sa_handler = forwardsignal; sa.sa_flags |= SA_RESETHAND; - if (sigaction(SIGTERM, &sa, NULL) < 0) + if (sigaction(SIGTERM, &sa, NULL) < 0) { goto error; + } /* notify the worker about window resize events */ sa.sa_flags = SA_RESTART; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + if (sigaction(SIGWINCH, &sa, NULL) < 0) { goto error; + } /* forward user-defined signals */ - if (sigaction(SIGUSR1, &sa, NULL) < 0) + if (sigaction(SIGUSR1, &sa, NULL) < 0) { goto error; - if (sigaction(SIGUSR2, &sa, NULL) < 0) + } + if (sigaction(SIGUSR2, &sa, NULL) < 0) { goto error; + } /* propagate job control requests to worker */ sa.sa_handler = forwardsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCONT, &sa, NULL) < 0) + if (sigaction(SIGCONT, &sa, NULL) < 0) { goto error; + } sa.sa_handler = handlestopsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + if (sigaction(SIGTSTP, &sa, NULL) < 0) { goto error; + } /* get notified when pager exits */ sa.sa_handler = handlechildsignal; sa.sa_flags = SA_RESTART; - if (sigaction(SIGCHLD, &sa, NULL) < 0) + if (sigaction(SIGCHLD, &sa, NULL) < 0) { goto error; + } return; @@ -147,26 +170,34 @@ memset(&sa, 0, sizeof(sa)); sa.sa_handler = SIG_DFL; sa.sa_flags = SA_RESTART; - if (sigemptyset(&sa.sa_mask) < 0) + if (sigemptyset(&sa.sa_mask) < 0) { goto error; + } - if (sigaction(SIGHUP, &sa, NULL) < 0) + if (sigaction(SIGHUP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGTERM, &sa, NULL) < 0) + } + if (sigaction(SIGTERM, &sa, NULL) < 0) { goto error; - if (sigaction(SIGWINCH, &sa, NULL) < 0) + } + if (sigaction(SIGWINCH, &sa, NULL) < 0) { goto error; - if (sigaction(SIGCONT, &sa, NULL) < 0) + } + if (sigaction(SIGCONT, &sa, NULL) < 0) { goto error; - if (sigaction(SIGTSTP, &sa, NULL) < 0) + } + if (sigaction(SIGTSTP, &sa, NULL) < 0) { goto error; - if (sigaction(SIGCHLD, &sa, NULL) < 0) + } + if (sigaction(SIGCHLD, &sa, NULL) < 0) { goto error; + } /* ignore Ctrl+C while shutting down to make pager exits cleanly */ sa.sa_handler = SIG_IGN; - if (sigaction(SIGINT, &sa, NULL) < 0) + if (sigaction(SIGINT, &sa, NULL) < 0) { goto error; + } peerpid = 0; return; @@ -180,22 +211,27 @@ pid_t setuppager(const char *pagercmd, const char *envp[]) { assert(pagerpid == 0); - if (!pagercmd) + if (!pagercmd) { return 0; + } int pipefds[2]; - if (pipe(pipefds) < 0) + if (pipe(pipefds) < 0) { return 0; + } pid_t pid = fork(); - if (pid < 0) + if (pid < 0) { goto error; + } if (pid > 0) { close(pipefds[0]); - if (dup2(pipefds[1], fileno(stdout)) < 0) + if (dup2(pipefds[1], fileno(stdout)) < 0) { goto error; + } if (isatty(fileno(stderr))) { - if (dup2(pipefds[1], fileno(stderr)) < 0) + if (dup2(pipefds[1], fileno(stderr)) < 0) { goto error; + } } close(pipefds[1]); pagerpid = pid; @@ -222,16 +258,18 @@ void waitpager(void) { - if (pagerpid == 0) + if (pagerpid == 0) { return; + } /* close output streams to notify the pager its input ends */ fclose(stdout); fclose(stderr); while (1) { pid_t ret = waitpid(pagerpid, NULL, 0); - if (ret == -1 && errno == 
EINTR) + if (ret == -1 && errno == EINTR) { continue; + } break; } } diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/chg/util.c --- a/contrib/chg/util.c Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/chg/util.c Wed Apr 17 13:41:18 2019 -0400 @@ -25,8 +25,9 @@ static inline void fsetcolor(FILE *fp, const char *code) { - if (!colorenabled) + if (!colorenabled) { return; + } fprintf(fp, "\033[%sm", code); } @@ -35,8 +36,9 @@ fsetcolor(stderr, "1;31"); fputs("chg: abort: ", stderr); vfprintf(stderr, fmt, args); - if (no != 0) + if (no != 0) { fprintf(stderr, " (errno = %d, %s)", no, strerror(no)); + } fsetcolor(stderr, ""); fputc('\n', stderr); exit(255); @@ -82,8 +84,9 @@ void debugmsg(const char *fmt, ...) { - if (!debugmsgenabled) + if (!debugmsgenabled) { return; + } va_list args; va_start(args, fmt); @@ -98,32 +101,37 @@ void fchdirx(int dirfd) { int r = fchdir(dirfd); - if (r == -1) + if (r == -1) { abortmsgerrno("failed to fchdir"); + } } void fsetcloexec(int fd) { int flags = fcntl(fd, F_GETFD); - if (flags < 0) + if (flags < 0) { abortmsgerrno("cannot get flags of fd %d", fd); - if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) + } + if (fcntl(fd, F_SETFD, flags | FD_CLOEXEC) < 0) { abortmsgerrno("cannot set flags of fd %d", fd); + } } void *mallocx(size_t size) { void *result = malloc(size); - if (!result) + if (!result) { abortmsg("failed to malloc"); + } return result; } void *reallocx(void *ptr, size_t size) { void *result = realloc(ptr, size); - if (!result) + if (!result) { abortmsg("failed to realloc"); + } return result; } @@ -144,30 +152,37 @@ memset(&newsa, 0, sizeof(newsa)); newsa.sa_handler = SIG_IGN; newsa.sa_flags = 0; - if (sigemptyset(&newsa.sa_mask) < 0) + if (sigemptyset(&newsa.sa_mask) < 0) { goto done; - if (sigaction(SIGINT, &newsa, &oldsaint) < 0) + } + if (sigaction(SIGINT, &newsa, &oldsaint) < 0) { goto done; + } doneflags |= F_SIGINT; - if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) + if (sigaction(SIGQUIT, &newsa, &oldsaquit) < 0) { goto done; + } doneflags |= F_SIGQUIT; - if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) + if (sigaddset(&newsa.sa_mask, SIGCHLD) < 0) { goto done; - if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) + } + if (sigprocmask(SIG_BLOCK, &newsa.sa_mask, &oldmask) < 0) { goto done; + } doneflags |= F_SIGMASK; pid_t pid = fork(); - if (pid < 0) + if (pid < 0) { goto done; + } if (pid == 0) { sigaction(SIGINT, &oldsaint, NULL); sigaction(SIGQUIT, &oldsaquit, NULL); sigprocmask(SIG_SETMASK, &oldmask, NULL); - if (cwd && chdir(cwd) < 0) + if (cwd && chdir(cwd) < 0) { _exit(127); + } const char *argv[] = {"sh", "-c", cmd, NULL}; if (envp) { execve("/bin/sh", (char **)argv, (char **)envp); @@ -176,25 +191,32 @@ } _exit(127); } else { - if (waitpid(pid, &status, 0) < 0) + if (waitpid(pid, &status, 0) < 0) { goto done; + } doneflags |= F_WAITPID; } done: - if (doneflags & F_SIGINT) + if (doneflags & F_SIGINT) { sigaction(SIGINT, &oldsaint, NULL); - if (doneflags & F_SIGQUIT) + } + if (doneflags & F_SIGQUIT) { sigaction(SIGQUIT, &oldsaquit, NULL); - if (doneflags & F_SIGMASK) + } + if (doneflags & F_SIGMASK) { sigprocmask(SIG_SETMASK, &oldmask, NULL); + } /* no way to report other errors, use 127 (= shell termination) */ - if (!(doneflags & F_WAITPID)) + if (!(doneflags & F_WAITPID)) { return 127; - if (WIFEXITED(status)) + } + if (WIFEXITED(status)) { return WEXITSTATUS(status); - if (WIFSIGNALED(status)) + } + if (WIFSIGNALED(status)) { return -WTERMSIG(status); + } return 127; } diff -r d1c33b2442a7 -r 4a8d9ed86475 
contrib/clang-format-ignorelist --- a/contrib/clang-format-ignorelist Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/clang-format-ignorelist Wed Apr 17 13:41:18 2019 -0400 @@ -62,6 +62,11 @@ contrib/python-zstandard/zstd/compress/zstd_opt.c contrib/python-zstandard/zstd/compress/zstd_opt.h contrib/python-zstandard/zstd/decompress/huf_decompress.c +contrib/python-zstandard/zstd/decompress/zstd_ddict.c +contrib/python-zstandard/zstd/decompress/zstd_ddict.h +contrib/python-zstandard/zstd/decompress/zstd_decompress_block.c +contrib/python-zstandard/zstd/decompress/zstd_decompress_block.h +contrib/python-zstandard/zstd/decompress/zstd_decompress_internal.h contrib/python-zstandard/zstd/decompress/zstd_decompress.c contrib/python-zstandard/zstd/deprecated/zbuff_common.c contrib/python-zstandard/zstd/deprecated/zbuff_compress.c diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/debugshell.py --- a/contrib/debugshell.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/debugshell.py Wed Apr 17 13:41:18 2019 -0400 @@ -7,6 +7,7 @@ import sys from mercurial import ( demandimport, + pycompat, registrar, ) @@ -32,28 +33,30 @@ IPython.embed() -@command('debugshell|dbsh', []) +@command(b'debugshell|dbsh', []) def debugshell(ui, repo, **opts): - bannermsg = "loaded repo : %s\n" \ - "using source: %s" % (repo.root, - mercurial.__path__[0]) + bannermsg = ("loaded repo : %s\n" + "using source: %s" % (pycompat.sysstr(repo.root), + mercurial.__path__[0])) pdbmap = { 'pdb' : 'code', 'ipdb' : 'IPython' } - debugger = ui.config("ui", "debugger") + debugger = ui.config(b"ui", b"debugger") if not debugger: debugger = 'pdb' + else: + debugger = pycompat.sysstr(debugger) # if IPython doesn't exist, fallback to code.interact try: with demandimport.deactivated(): __import__(pdbmap[debugger]) except ImportError: - ui.warn(("%s debugger specified but %s module was not found\n") + ui.warn((b"%s debugger specified but %s module was not found\n") % (debugger, pdbmap[debugger])) - debugger = 'pdb' + debugger = b'pdb' getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/discovery-helper.sh --- a/contrib/discovery-helper.sh Tue Mar 19 09:23:35 2019 -0400 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,64 +0,0 @@ -#!/bin/bash -# -# produces two repositories with different common and missing subsets -# -# $ discovery-helper.sh REPO NBHEADS DEPT -# -# The Goal is to produce two repositories with some common part and some -# exclusive part on each side. Provide a source repository REPO, it will -# produce two repositories REPO-left and REPO-right. -# -# Each repository will be missing some revisions exclusive to NBHEADS of the -# repo topological heads. These heads and revisions exclusive to them (up to -# DEPTH depth) are stripped. -# -# The "left" repository will use the NBHEADS first heads (sorted by -# description). The "right" use the last NBHEADS one. -# -# To find out how many topological heads a repo has, use: -# -# $ hg heads -t -T '{rev}\n' | wc -l -# -# Example: -# -# The `pypy-2018-09-01` repository has 192 heads. To produce two repositories -# with 92 common heads and ~50 exclusive heads on each side. 
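The ``debugshell.py`` hunk above is a Python 3 porting fix: command names and config keys become byte strings, and values read from the config are converted with ``pycompat.sysstr()`` before being used as module names. A tiny sketch of that bytes/str boundary (not taken from the patch)::

    # Mercurial config values are bytes; stdlib APIs such as __import__ want
    # native str, hence the pycompat.sysstr() conversion in debugshell().
    from mercurial import pycompat

    debugger = b'ipdb'                  # as returned by ui.config(b'ui', b'debugger')
    name = pycompat.sysstr(debugger)    # native str: 'ipdb'
    assert isinstance(name, str)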
-# -# $ ./discovery-helper.sh pypy-2018-08-01 50 10 - -set -euo pipefail - -if [ $# -lt 3 ]; then - echo "usage: `basename $0` REPO NBHEADS DEPTH" - exit 64 -fi - -repo="$1" -shift - -nbheads="$1" -shift - -depth="$1" -shift - -leftrepo="${repo}-left" -rightrepo="${repo}-right" - -left="first(sort(heads(all()), 'desc'), $nbheads)" -right="last(sort(heads(all()), 'desc'), $nbheads)" - -leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))" -rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))" - -echo '### building left repository:' $left-repo -echo '# cloning' -hg clone --noupdate "${repo}" "${leftrepo}" -echo '# stripping' '"'${leftsubset}'"' -hg -R "${leftrepo}" --config extensions.strip= strip --rev "$leftsubset" --no-backup - -echo '### building right repository:' $right-repo -echo '# cloning' -hg clone --noupdate "${repo}" "${rightrepo}" -echo '# stripping:' '"'${rightsubset}'"' -hg -R "${rightrepo}" --config extensions.strip= strip --rev "$rightsubset" --no-backup diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/fuzz/manifest.cc --- a/contrib/fuzz/manifest.cc Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/fuzz/manifest.cc Wed Apr 17 13:41:18 2019 -0400 @@ -20,11 +20,19 @@ lm = lazymanifest(mdata) # iterate the whole thing, which causes the code to fully parse # every line in the manifest - list(lm.iterentries()) + for e, _, _ in lm.iterentries(): + # also exercise __getitem__ et al + lm[e] + e in lm + (e + 'nope') in lm lm[b'xyzzy'] = (b'\0' * 20, 'x') # do an insert, text should change assert lm.text() != mdata, "insert should change text and didn't: %r %r" % (lm.text(), mdata) + cloned = lm.filtercopy(lambda x: x != 'xyzzy') + assert cloned.text() == mdata, 'cloned text should equal mdata' + cloned.diff(lm) del lm[b'xyzzy'] + cloned.diff(lm) # should be back to the same assert lm.text() == mdata, "delete should have restored text but didn't: %r %r" % (lm.text(), mdata) except Exception as e: @@ -39,6 +47,11 @@ int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { + // Don't allow fuzzer inputs larger than 100k, since we'll just bog + // down and not accomplish much. + if (Size > 100000) { + return 0; + } PyObject *mtext = PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size); PyObject *locals = PyDict_New(); diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/fuzz/revlog.cc --- a/contrib/fuzz/revlog.cc Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/fuzz/revlog.cc Wed Apr 17 13:41:18 2019 -0400 @@ -19,6 +19,11 @@ for inline in (True, False): try: index, cache = parse_index2(data, inline) + index.slicechunktodensity(list(range(len(index))), 0.5, 262144) + for rev in range(len(index)): + node = index[rev][7] + partial = index.shortest(node) + index.partialmatch(node[:partial]) except Exception as e: pass # uncomment this print if you're editing this Python code @@ -31,6 +36,11 @@ int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) { + // Don't allow fuzzer inputs larger than 60k, since we'll just bog + // down and not accomplish much. 
+ if (Size > 60000) { + return 0; + } PyObject *text = PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size); PyObject *locals = PyDict_New(); diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/hg-test-mode.el --- a/contrib/hg-test-mode.el Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/hg-test-mode.el Wed Apr 17 13:41:18 2019 -0400 @@ -53,4 +53,45 @@ (setq mode-name "hg-test") (run-hooks 'hg-test-mode-hook)) +(with-eval-after-load "compile" + ;; Link to Python sources in tracebacks in .t failures. + (add-to-list 'compilation-error-regexp-alist-alist + '(hg-test-output-python-tb + "^\\+ +File ['\"]\\([^'\"]+\\)['\"], line \\([0-9]+\\)," 1 2)) + (add-to-list 'compilation-error-regexp-alist 'hg-test-output-python-tb) + ;; Link to source files in test-check-code.t violations. + (add-to-list 'compilation-error-regexp-alist-alist + '(hg-test-check-code-output + "\\+ \\([^:\n]+\\):\\([0-9]+\\):$" 1 2)) + (add-to-list 'compilation-error-regexp-alist 'hg-test-check-code-output)) + +(defun hg-test-mode--test-one-error-line-regexp (test) + (erase-buffer) + (setq compilation-locs (make-hash-table)) + (insert (car test)) + (compilation-parse-errors (point-min) (point-max)) + (let ((msg (get-text-property 1 'compilation-message))) + (should msg) + (let ((loc (compilation--message->loc msg)) + (line (nth 1 test)) + (file (nth 2 test))) + (should (equal (compilation--loc->line loc) line)) + (should (equal (caar (compilation--loc->file-struct loc)) file))) + msg)) + +(require 'ert) +(ert-deftest hg-test-mode--compilation-mode-support () + "Test hg-specific compilation-mode regular expressions" + (require 'compile) + (with-temp-buffer + (font-lock-mode -1) + (mapc 'hg-test-mode--test-one-error-line-regexp + '( + ("+ contrib/debugshell.py:37:" 37 "contrib/debugshell.py") + ("+ File \"/tmp/hg/mercurial/commands.py\", line 3115, in help_" + 3115 "/tmp/hg/mercurial/commands.py") + ("+ File \"mercurial/dispatch.py\", line 225, in dispatch" + 225 "mercurial/dispatch.py"))))) + + (provide 'hg-test-mode) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/install-windows-dependencies.ps1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/install-windows-dependencies.ps1 Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,200 @@ +# install-dependencies.ps1 - Install Windows dependencies for building Mercurial +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# This script can be used to bootstrap a Mercurial build environment on +# Windows. +# +# The script makes a lot of assumptions about how things should work. +# For example, the install location of Python is hardcoded to c:\hgdev\*. +# +# The script should be executed from a PowerShell with elevated privileges +# if you don't want to see a UAC prompt for various installers. +# +# The script is tested on Windows 10 and Windows Server 2019 (in EC2). 
+ +$VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe" +$VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139" + +$VC9_PYTHON_URL = "https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi" +$VC9_PYTHON_SHA256 = "070474db76a2e625513a5835df4595df9324d820f9cc97eab2a596dcbc2f5cbf" + +$PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.16/python-2.7.16.amd64.msi" +$PYTHON27_x64_SHA256 = "7c0f45993019152d46041a7db4b947b919558fdb7a8f67bcd0535bc98d42b603" +$PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.16/python-2.7.16.msi" +$PYTHON27_X86_SHA256 = "d57dc3e1ba490aee856c28b4915d09e3f49442461e46e481bc6b2d18207831d7" + +$PYTHON35_x86_URL = "https://www.python.org/ftp/python/3.5.4/python-3.5.4.exe" +$PYTHON35_x86_SHA256 = "F27C2D67FD9688E4970F3BFF799BB9D722A0D6C2C13B04848E1F7D620B524B0E" +$PYTHON35_x64_URL = "https://www.python.org/ftp/python/3.5.4/python-3.5.4-amd64.exe" +$PYTHON35_x64_SHA256 = "9B7741CC32357573A77D2EE64987717E527628C38FD7EAF3E2AACA853D45A1EE" + +$PYTHON36_x86_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8.exe" +$PYTHON36_x86_SHA256 = "89871D432BC06E4630D7B64CB1A8451E53C80E68DE29029976B12AAD7DBFA5A0" +$PYTHON36_x64_URL = "https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe" +$PYTHON36_x64_SHA256 = "96088A58B7C43BC83B84E6B67F15E8706C614023DD64F9A5A14E81FF824ADADC" + +$PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.2/python-3.7.2.exe" +$PYTHON37_x86_SHA256 = "8BACE330FB409E428B04EEEE083DD9CA7F6C754366D07E23B3853891D8F8C3D0" +$PYTHON37_x64_URL = "https://www.python.org/ftp/python/3.7.2/python-3.7.2-amd64.exe" +$PYTHON37_x64_SHA256 = "0FE2A696F5A3E481FED795EF6896ED99157BCEF273EF3C4A96F2905CBDB3AA13" + +$PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0a2.exe" +$PYTHON38_x86_SHA256 = "013A7DDD317679FE51223DE627688CFCB2F0F1128FD25A987F846AEB476D3FEF" +$PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.0/python-3.8.0a2-amd64.exe" +$PYTHON38_X64_SHA256 = "560BC6D1A76BCD6D544AC650709F3892956890753CDCF9CE67E3D7302D76FB41" + +# PIP 19.0.3. +$PIP_URL = "https://github.com/pypa/get-pip/raw/fee32c376da1ff6496a798986d7939cd51e1644f/get-pip.py" +$PIP_SHA256 = "efe99298f3fbb1f56201ce6b81d2658067d2f7d7dfc2d412e0d3cacc9a397c61" + +$VIRTUALENV_URL = "https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/virtualenv-16.4.3.tar.gz" +$VIRTUALENV_SHA256 = "984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39" + +$INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe" +$INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538" + +$MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip" +$MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF" + +$MERCURIAL_WHEEL_FILENAME = "mercurial-4.9-cp27-cp27m-win_amd64.whl" +$MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/fe/e8/b872d53dfbbf986bdc46af0b30f580b227fb59bddd2587152a55e205b0cc/$MERCURIAL_WHEEL_FILENAME" +$MERCURIAL_WHEEL_SHA256 = "218cc2e7c3f1d535007febbb03351663897edf27df0e57d6842e3b686492b429" + +# Writing progress slows down downloads substantially. So disable it. 
+$progressPreference = 'silentlyContinue' + +function Secure-Download($url, $path, $sha256) { + if (Test-Path -Path $path) { + Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash + + if ($hash.Hash -eq $sha256) { + Write-Output "SHA256 of $path verified as $sha256" + return + } + + Write-Output "hash mismatch on $path; downloading again" + } + + Write-Output "downloading $url to $path" + Invoke-WebRequest -Uri $url -OutFile $path + Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash + + if ($hash.Hash -ne $sha256) { + Remove-Item -Path $path + throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256" + } +} + +function Invoke-Process($path, $arguments) { + $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden + + if ($p.ExitCode -ne 0) { + throw "process exited non-0: $($p.ExitCode)" + } +} + +function Install-Python3($name, $installer, $dest, $pip) { + Write-Output "installing $name" + + # We hit this when running the script as part of Simple Systems Manager in + # EC2. The Python 3 installer doesn't seem to like per-user installs + # when running as the SYSTEM user. So enable global installs if executed in + # this mode. + if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") { + Write-Output "running with SYSTEM account; installing for all users" + $allusers = "1" + } + else { + $allusers = "0" + } + + Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0" + Invoke-Process ${dest}\python.exe $pip +} + +function Install-Dependencies($prefix) { + if (!(Test-Path -Path $prefix\assets)) { + New-Item -Path $prefix\assets -ItemType Directory + } + + $pip = "${prefix}\assets\get-pip.py" + + Secure-Download $VC9_PYTHON_URL ${prefix}\assets\VCForPython27.msi $VC9_PYTHON_SHA256 + Secure-Download $PYTHON27_x86_URL ${prefix}\assets\python27-x86.msi $PYTHON27_x86_SHA256 + Secure-Download $PYTHON27_x64_URL ${prefix}\assets\python27-x64.msi $PYTHON27_x64_SHA256 + Secure-Download $PYTHON35_x86_URL ${prefix}\assets\python35-x86.exe $PYTHON35_x86_SHA256 + Secure-Download $PYTHON35_x64_URL ${prefix}\assets\python35-x64.exe $PYTHON35_x64_SHA256 + Secure-Download $PYTHON36_x86_URL ${prefix}\assets\python36-x86.exe $PYTHON36_x86_SHA256 + Secure-Download $PYTHON36_x64_URL ${prefix}\assets\python36-x64.exe $PYTHON36_x64_SHA256 + Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256 + Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256 + Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256 + Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256 + Secure-Download $PIP_URL ${pip} $PIP_SHA256 + Secure-Download $VIRTUALENV_URL ${prefix}\assets\virtualenv.tar.gz $VIRTUALENV_SHA256 + Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256 + Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256 + Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256 + Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256 + + Write-Output "installing Python 2.7 32-bit" + Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x86.msi /l* ${prefix}\assets\python27-x86.log /q 
TARGETDIR=${prefix}\python27-x86 ALLUSERS=" + Invoke-Process ${prefix}\python27-x86\python.exe ${prefix}\assets\get-pip.py + Invoke-Process ${prefix}\python27-x86\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" + + Write-Output "installing Python 2.7 64-bit" + Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x64.msi /l* ${prefix}\assets\python27-x64.log /q TARGETDIR=${prefix}\python27-x64 ALLUSERS=" + Invoke-Process ${prefix}\python27-x64\python.exe ${prefix}\assets\get-pip.py + Invoke-Process ${prefix}\python27-x64\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" + + Install-Python3 "Python 3.5 32-bit" ${prefix}\assets\python35-x86.exe ${prefix}\python35-x86 ${pip} + Install-Python3 "Python 3.5 64-bit" ${prefix}\assets\python35-x64.exe ${prefix}\python35-x64 ${pip} + Install-Python3 "Python 3.6 32-bit" ${prefix}\assets\python36-x86.exe ${prefix}\python36-x86 ${pip} + Install-Python3 "Python 3.6 64-bit" ${prefix}\assets\python36-x64.exe ${prefix}\python36-x64 ${pip} + Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip} + Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip} + Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip} + Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip} + + Write-Output "installing Visual Studio 2017 Build Tools and SDKs" + Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140" + + Write-Output "installing Visual C++ 9.0 for Python 2.7" + Invoke-Process msiexec.exe "/i ${prefix}\assets\VCForPython27.msi /l* ${prefix}\assets\VCForPython27.log /q" + + Write-Output "installing Inno Setup" + Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES" + + Write-Output "extracting MinGW base archive" + Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force + + Write-Output "updating MinGW package catalogs" + Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update" + + Write-Output "installing MinGW packages" + Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip" + + # Construct a virtualenv useful for bootstrapping. It conveniently contains a + # Mercurial install. + Write-Output "creating bootstrap virtualenv with Mercurial" + Invoke-Process "$prefix\python27-x64\Scripts\virtualenv.exe" "${prefix}\venv-bootstrap" + Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}" +} + +function Clone-Mercurial-Repo($prefix, $repo_url, $dest) { + Write-Output "cloning $repo_url to $dest" + # TODO Figure out why CA verification isn't working in EC2 and remove + # --insecure. + Invoke-Process "${prefix}\venv-bootstrap\Scripts\hg.exe" "clone --insecure $repo_url $dest" + + # Mark repo as non-publishing by default for convenience. 
+ Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false" +} + +$prefix = "c:\hgdev" +Install-Dependencies $prefix +Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/hg-docker --- a/contrib/packaging/hg-docker Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/packaging/hg-docker Wed Apr 17 13:41:18 2019 -0400 @@ -76,7 +76,7 @@ p.communicate(input=dockerfile) if p.returncode: raise subprocess.CalledProcessException( - p.returncode, 'failed to build docker image: %s %s' \ + p.returncode, 'failed to build docker image: %s %s' % (p.stdout, p.stderr)) def command_build(args): diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/hgpackaging/__init__.py diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/hgpackaging/downloads.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/downloads.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,175 @@ +# downloads.py - Code for downloading dependencies. +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import gzip +import hashlib +import pathlib +import urllib.request + + +DOWNLOADS = { + 'gettext': { + 'url': 'https://versaweb.dl.sourceforge.net/project/gnuwin32/gettext/0.14.4/gettext-0.14.4-bin.zip', + 'size': 1606131, + 'sha256': '60b9ef26bc5cceef036f0424e542106cf158352b2677f43a01affd6d82a1d641', + 'version': '0.14.4', + }, + 'gettext-dep': { + 'url': 'https://versaweb.dl.sourceforge.net/project/gnuwin32/gettext/0.14.4/gettext-0.14.4-dep.zip', + 'size': 715086, + 'sha256': '411f94974492fd2ecf52590cb05b1023530aec67e64154a88b1e4ebcd9c28588', + }, + 'py2exe': { + 'url': 'https://versaweb.dl.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.zip', + 'size': 149687, + 'sha256': '6bd383312e7d33eef2e43a5f236f9445e4f3e0f6b16333c6f183ed445c44ddbd', + 'version': '0.6.9', + }, + # The VC9 CRT merge modules aren't readily available on most systems because + # they are only installed as part of a full Visual Studio 2008 install. + # While we could potentially extract them from a Visual Studio 2008 + # installer, it is easier to just fetch them from a known URL. 
+ 'vc9-crt-x86-msm': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86.msm', + 'size': 615424, + 'sha256': '837e887ef31b332feb58156f429389de345cb94504228bb9a523c25a9dd3d75e', + }, + 'vc9-crt-x86-msm-policy': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86.msm', + 'size': 71168, + 'sha256': '3fbcf92e3801a0757f36c5e8d304e134a68d5cafd197a6df7734ae3e8825c940', + }, + 'vc9-crt-x64-msm': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/Microsoft_VC90_CRT_x86_x64.msm', + 'size': 662528, + 'sha256': '50d9639b5ad4844a2285269c7551bf5157ec636e32396ddcc6f7ec5bce487a7c', + }, + 'vc9-crt-x64-msm-policy': { + 'url': 'https://github.com/indygreg/vc90-merge-modules/raw/9232f8f0b2135df619bf7946eaa176b4ac35ccff/policy_9_0_Microsoft_VC90_CRT_x86_x64.msm', + 'size': 71168, + 'sha256': '0550ea1929b21239134ad3a678c944ba0f05f11087117b6cf0833e7110686486', + }, + 'virtualenv': { + 'url': 'https://files.pythonhosted.org/packages/37/db/89d6b043b22052109da35416abc3c397655e4bd3cff031446ba02b9654fa/virtualenv-16.4.3.tar.gz', + 'size': 3713208, + 'sha256': '984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39', + 'version': '16.4.3', + }, + 'wix': { + 'url': 'https://github.com/wixtoolset/wix3/releases/download/wix3111rtm/wix311-binaries.zip', + 'size': 34358269, + 'sha256': '37f0a533b0978a454efb5dc3bd3598becf9660aaf4287e55bf68ca6b527d051d', + 'version': '3.11.1', + }, +} + + +def hash_path(p: pathlib.Path): + h = hashlib.sha256() + + with p.open('rb') as fh: + while True: + chunk = fh.read(65536) + if not chunk: + break + + h.update(chunk) + + return h.hexdigest() + + +class IntegrityError(Exception): + """Represents an integrity error when downloading a URL.""" + + +def secure_download_stream(url, size, sha256): + """Securely download a URL to a stream of chunks. + + If the integrity of the download fails, an IntegrityError is + raised. + """ + h = hashlib.sha256() + length = 0 + + with urllib.request.urlopen(url) as fh: + if not url.endswith('.gz') and fh.info().get('Content-Encoding') == 'gzip': + fh = gzip.GzipFile(fileobj=fh) + + while True: + chunk = fh.read(65536) + if not chunk: + break + + h.update(chunk) + length += len(chunk) + + yield chunk + + digest = h.hexdigest() + + if length != size: + raise IntegrityError('size mismatch on %s: wanted %d; got %d' % ( + url, size, length)) + + if digest != sha256: + raise IntegrityError('sha256 mismatch on %s: wanted %s; got %s' % ( + url, sha256, digest)) + + +def download_to_path(url: str, path: pathlib.Path, size: int, sha256: str): + """Download a URL to a filesystem path, possibly with verification.""" + + # We download to a temporary file and rename at the end so there's + # no chance of the final file being partially written or containing + # bad data. 
+ print('downloading %s to %s' % (url, path)) + + if path.exists(): + good = True + + if path.stat().st_size != size: + print('existing file size is wrong; removing') + good = False + + if good: + if hash_path(path) != sha256: + print('existing file hash is wrong; removing') + good = False + + if good: + print('%s exists and passes integrity checks' % path) + return + + path.unlink() + + tmp = path.with_name('%s.tmp' % path.name) + + try: + with tmp.open('wb') as fh: + for chunk in secure_download_stream(url, size, sha256): + fh.write(chunk) + except IntegrityError: + tmp.unlink() + raise + + tmp.rename(path) + print('successfully downloaded %s' % url) + + +def download_entry(name: dict, dest_path: pathlib.Path, local_name=None) -> pathlib.Path: + entry = DOWNLOADS[name] + + url = entry['url'] + + local_name = local_name or url[url.rindex('/') + 1:] + + local_path = dest_path / local_name + download_to_path(url, local_path, entry['size'], entry['sha256']) + + return local_path, entry diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/hgpackaging/inno.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/inno.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,78 @@ +# inno.py - Inno Setup functionality. +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import os +import pathlib +import shutil +import subprocess + +from .py2exe import ( + build_py2exe, +) +from .util import ( + find_vc_runtime_files, +) + + +EXTRA_PACKAGES = { + 'dulwich', + 'keyring', + 'pygments', + 'win32ctypes', +} + + +def build(source_dir: pathlib.Path, build_dir: pathlib.Path, + python_exe: pathlib.Path, iscc_exe: pathlib.Path, + version=None): + """Build the Inno installer. + + Build files will be placed in ``build_dir``. + + py2exe's setup.py doesn't use setuptools. It doesn't have modern logic + for finding the Python 2.7 toolchain. So, we require the environment + to already be configured with an active toolchain. + """ + if not iscc_exe.exists(): + raise Exception('%s does not exist' % iscc_exe) + + vc_x64 = r'\x64' in os.environ.get('LIB', '') + + requirements_txt = (source_dir / 'contrib' / 'packaging' / + 'inno' / 'requirements.txt') + + build_py2exe(source_dir, build_dir, python_exe, 'inno', + requirements_txt, extra_packages=EXTRA_PACKAGES) + + # hg.exe depends on VC9 runtime DLLs. Copy those into place. + for f in find_vc_runtime_files(vc_x64): + if f.name.endswith('.manifest'): + basename = 'Microsoft.VC90.CRT.manifest' + else: + basename = f.name + + dest_path = source_dir / 'dist' / basename + + print('copying %s to %s' % (f, dest_path)) + shutil.copyfile(f, dest_path) + + print('creating installer') + + args = [str(iscc_exe)] + + if vc_x64: + args.append('/dARCH=x64') + + if version: + args.append('/dVERSION=%s' % version) + + args.append('/Odist') + args.append('contrib/packaging/inno/mercurial.iss') + + subprocess.run(args, cwd=str(source_dir), check=True) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/hgpackaging/py2exe.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/py2exe.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,150 @@ +# py2exe.py - Functionality for performing py2exe builds. +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
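``hgpackaging/downloads.py`` above gives the packaging code a single registry of hash-pinned artifacts plus ``download_entry()`` to fetch and verify them, which ``inno.py`` then builds on for gettext, virtualenv and py2exe. A hedged usage sketch, assuming ``contrib/packaging`` is on ``sys.path``::

    # Illustration only: fetch the pinned py2exe archive into ./build,
    # verifying its size and SHA-256 against the DOWNLOADS registry.
    import pathlib
    from hgpackaging.downloads import download_entry

    dest = pathlib.Path('build')
    dest.mkdir(exist_ok=True)
    local_path, entry = download_entry('py2exe', dest)
    print('downloaded %s (version %s)' % (local_path, entry['version']))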
+ +# no-check-code because Python 3 native. + +import os +import pathlib +import subprocess + +from .downloads import ( + download_entry, +) +from .util import ( + extract_tar_to_directory, + extract_zip_to_directory, + python_exe_info, +) + + +def build_py2exe(source_dir: pathlib.Path, build_dir: pathlib.Path, + python_exe: pathlib.Path, build_name: str, + venv_requirements_txt: pathlib.Path, + extra_packages=None, extra_excludes=None, + extra_dll_excludes=None, + extra_packages_script=None): + """Build Mercurial with py2exe. + + Build files will be placed in ``build_dir``. + + py2exe's setup.py doesn't use setuptools. It doesn't have modern logic + for finding the Python 2.7 toolchain. So, we require the environment + to already be configured with an active toolchain. + """ + if 'VCINSTALLDIR' not in os.environ: + raise Exception('not running from a Visual C++ build environment; ' + 'execute the "Visual C++ Command Prompt" ' + 'application shortcut or a vcsvarsall.bat file') + + # Identity x86/x64 and validate the environment matches the Python + # architecture. + vc_x64 = r'\x64' in os.environ['LIB'] + + py_info = python_exe_info(python_exe) + + if vc_x64: + if py_info['arch'] != '64bit': + raise Exception('architecture mismatch: Visual C++ environment ' + 'is configured for 64-bit but Python is 32-bit') + else: + if py_info['arch'] != '32bit': + raise Exception('architecture mismatch: Visual C++ environment ' + 'is configured for 32-bit but Python is 64-bit') + + if py_info['py3']: + raise Exception('Only Python 2 is currently supported') + + build_dir.mkdir(exist_ok=True) + + gettext_pkg, gettext_entry = download_entry('gettext', build_dir) + gettext_dep_pkg = download_entry('gettext-dep', build_dir)[0] + virtualenv_pkg, virtualenv_entry = download_entry('virtualenv', build_dir) + py2exe_pkg, py2exe_entry = download_entry('py2exe', build_dir) + + venv_path = build_dir / ('venv-%s-%s' % (build_name, + 'x64' if vc_x64 else 'x86')) + + gettext_root = build_dir / ( + 'gettext-win-%s' % gettext_entry['version']) + + if not gettext_root.exists(): + extract_zip_to_directory(gettext_pkg, gettext_root) + extract_zip_to_directory(gettext_dep_pkg, gettext_root) + + # This assumes Python 2. We don't need virtualenv on Python 3. + virtualenv_src_path = build_dir / ( + 'virtualenv-%s' % virtualenv_entry['version']) + virtualenv_py = virtualenv_src_path / 'virtualenv.py' + + if not virtualenv_src_path.exists(): + extract_tar_to_directory(virtualenv_pkg, build_dir) + + py2exe_source_path = build_dir / ('py2exe-%s' % py2exe_entry['version']) + + if not py2exe_source_path.exists(): + extract_zip_to_directory(py2exe_pkg, build_dir) + + if not venv_path.exists(): + print('creating virtualenv with dependencies') + subprocess.run( + [str(python_exe), str(virtualenv_py), str(venv_path)], + check=True) + + venv_python = venv_path / 'Scripts' / 'python.exe' + venv_pip = venv_path / 'Scripts' / 'pip.exe' + + subprocess.run([str(venv_pip), 'install', '-r', str(venv_requirements_txt)], + check=True) + + # Force distutils to use VC++ settings from environment, which was + # validated above. 
+ env = dict(os.environ) + env['DISTUTILS_USE_SDK'] = '1' + env['MSSdk'] = '1' + + if extra_packages_script: + more_packages = set(subprocess.check_output( + extra_packages_script, + cwd=build_dir).split(b'\0')[-1].strip().decode('utf-8').splitlines()) + if more_packages: + if not extra_packages: + extra_packages = more_packages + else: + extra_packages |= more_packages + + if extra_packages: + env['HG_PY2EXE_EXTRA_PACKAGES'] = ' '.join(sorted(extra_packages)) + hgext3rd_extras = sorted( + e for e in extra_packages if e.startswith('hgext3rd.')) + if hgext3rd_extras: + env['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'] = ' '.join(hgext3rd_extras) + if extra_excludes: + env['HG_PY2EXE_EXTRA_EXCLUDES'] = ' '.join(sorted(extra_excludes)) + if extra_dll_excludes: + env['HG_PY2EXE_EXTRA_DLL_EXCLUDES'] = ' '.join( + sorted(extra_dll_excludes)) + + py2exe_py_path = venv_path / 'Lib' / 'site-packages' / 'py2exe' + if not py2exe_py_path.exists(): + print('building py2exe') + subprocess.run([str(venv_python), 'setup.py', 'install'], + cwd=py2exe_source_path, + env=env, + check=True) + + # Register location of msgfmt and other binaries. + env['PATH'] = '%s%s%s' % ( + env['PATH'], os.pathsep, str(gettext_root / 'bin')) + + print('building Mercurial') + subprocess.run( + [str(venv_python), 'setup.py', + 'py2exe', + 'build_doc', '--html'], + cwd=str(source_dir), + env=env, + check=True) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/hgpackaging/util.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/util.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,155 @@ +# util.py - Common packaging utility code. +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. + +import distutils.version +import getpass +import os +import pathlib +import subprocess +import tarfile +import zipfile + + +def extract_tar_to_directory(source: pathlib.Path, dest: pathlib.Path): + with tarfile.open(source, 'r') as tf: + tf.extractall(dest) + + +def extract_zip_to_directory(source: pathlib.Path, dest: pathlib.Path): + with zipfile.ZipFile(source, 'r') as zf: + zf.extractall(dest) + + +def find_vc_runtime_files(x64=False): + """Finds Visual C++ Runtime DLLs to include in distribution.""" + winsxs = pathlib.Path(os.environ['SYSTEMROOT']) / 'WinSxS' + + prefix = 'amd64' if x64 else 'x86' + + candidates = sorted(p for p in os.listdir(winsxs) + if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)) + + for p in candidates: + print('found candidate VC runtime: %s' % p) + + # Take the newest version. + version = candidates[-1] + + d = winsxs / version + + return [ + d / 'msvcm90.dll', + d / 'msvcp90.dll', + d / 'msvcr90.dll', + winsxs / 'Manifests' / ('%s.manifest' % version), + ] + + +def windows_10_sdk_info(): + """Resolves information about the Windows 10 SDK.""" + + base = pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Windows Kits' / '10' + + if not base.is_dir(): + raise Exception('unable to find Windows 10 SDK at %s' % base) + + # Find the latest version. 
+ bin_base = base / 'bin' + + versions = [v for v in os.listdir(bin_base) if v.startswith('10.')] + version = sorted(versions, reverse=True)[0] + + bin_version = bin_base / version + + return { + 'root': base, + 'version': version, + 'bin_root': bin_version, + 'bin_x86': bin_version / 'x86', + 'bin_x64': bin_version / 'x64' + } + + +def find_signtool(): + """Find signtool.exe from the Windows SDK.""" + sdk = windows_10_sdk_info() + + for key in ('bin_x64', 'bin_x86'): + p = sdk[key] / 'signtool.exe' + + if p.exists(): + return p + + raise Exception('could not find signtool.exe in Windows 10 SDK') + + +def sign_with_signtool(file_path, description, subject_name=None, + cert_path=None, cert_password=None, + timestamp_url=None): + """Digitally sign a file with signtool.exe. + + ``file_path`` is file to sign. + ``description`` is text that goes in the signature. + + The signing certificate can be specified by ``cert_path`` or + ``subject_name``. These correspond to the ``/f`` and ``/n`` arguments + to signtool.exe, respectively. + + The certificate password can be specified via ``cert_password``. If + not provided, you will be prompted for the password. + + ``timestamp_url`` is the URL of a RFC 3161 timestamp server (``/tr`` + argument to signtool.exe). + """ + if cert_path and subject_name: + raise ValueError('cannot specify both cert_path and subject_name') + + while cert_path and not cert_password: + cert_password = getpass.getpass('password for %s: ' % cert_path) + + args = [ + str(find_signtool()), 'sign', + '/v', + '/fd', 'sha256', + '/d', description, + ] + + if cert_path: + args.extend(['/f', str(cert_path), '/p', cert_password]) + elif subject_name: + args.extend(['/n', subject_name]) + + if timestamp_url: + args.extend(['/tr', timestamp_url, '/td', 'sha256']) + + args.append(str(file_path)) + + print('signing %s' % file_path) + subprocess.run(args, check=True) + + +PRINT_PYTHON_INFO = ''' +import platform; print("%s:%s" % (platform.architecture()[0], platform.python_version())) +'''.strip() + + +def python_exe_info(python_exe: pathlib.Path): + """Obtain information about a Python executable.""" + + res = subprocess.check_output([str(python_exe), '-c', PRINT_PYTHON_INFO]) + + arch, version = res.decode('utf-8').split(':') + + version = distutils.version.LooseVersion(version) + + return { + 'arch': arch, + 'version': version, + 'py3': version >= distutils.version.LooseVersion('3'), + } diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/hgpackaging/wix.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/hgpackaging/wix.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,327 @@ +# wix.py - WiX installer functionality +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. 
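``sign_with_signtool()`` above is invoked later by the WiX build, but it can also be driven directly. A hypothetical invocation — the checkout path, certificate, and timestamp URL are placeholders, and ``contrib/packaging`` must be on ``sys.path`` — might look like::

   import pathlib
   import sys

   sys.path.insert(0, r'C:\src\hg\contrib\packaging')  # placeholder checkout path

   from hgpackaging.util import sign_with_signtool

   sign_with_signtool(
       pathlib.Path(r'C:\src\hg\dist\hg.exe'),   # file to sign (placeholder)
       'Mercurial 5.0',                          # signature description
       cert_path=pathlib.Path(r'C:\certs\codesign.pfx'),  # placeholder certificate
       # cert_password is omitted, so the helper prompts for it interactively.
       timestamp_url='http://timestamp.example.invalid/rfc3161',
   )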
+ +import os +import pathlib +import re +import subprocess +import tempfile +import typing +import xml.dom.minidom + +from .downloads import ( + download_entry, +) +from .py2exe import ( + build_py2exe, +) +from .util import ( + extract_zip_to_directory, + sign_with_signtool, +) + + +SUPPORT_WXS = [ + ('contrib.wxs', r'contrib'), + ('dist.wxs', r'dist'), + ('doc.wxs', r'doc'), + ('help.wxs', r'mercurial\help'), + ('i18n.wxs', r'i18n'), + ('locale.wxs', r'mercurial\locale'), + ('templates.wxs', r'mercurial\templates'), +] + + +EXTRA_PACKAGES = { + 'distutils', + 'pygments', +} + + +def find_version(source_dir: pathlib.Path): + version_py = source_dir / 'mercurial' / '__version__.py' + + with version_py.open('r', encoding='utf-8') as fh: + source = fh.read().strip() + + m = re.search('version = b"(.*)"', source) + return m.group(1) + + +def normalize_version(version): + """Normalize Mercurial version string so WiX accepts it. + + Version strings have to be numeric X.Y.Z. + """ + + if '+' in version: + version, extra = version.split('+', 1) + else: + extra = None + + # 4.9rc0 + if version[:-1].endswith('rc'): + version = version[:-3] + + versions = [int(v) for v in version.split('.')] + while len(versions) < 3: + versions.append(0) + + major, minor, build = versions[:3] + + if extra: + # -+ + build = int(extra.split('-')[0]) + + return '.'.join('%d' % x for x in (major, minor, build)) + + +def ensure_vc90_merge_modules(build_dir): + x86 = ( + download_entry('vc9-crt-x86-msm', build_dir, + local_name='microsoft.vcxx.crt.x86_msm.msm')[0], + download_entry('vc9-crt-x86-msm-policy', build_dir, + local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm')[0] + ) + + x64 = ( + download_entry('vc9-crt-x64-msm', build_dir, + local_name='microsoft.vcxx.crt.x64_msm.msm')[0], + download_entry('vc9-crt-x64-msm-policy', build_dir, + local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm')[0] + ) + return { + 'x86': x86, + 'x64': x64, + } + + +def run_candle(wix, cwd, wxs, source_dir, defines=None): + args = [ + str(wix / 'candle.exe'), + '-nologo', + str(wxs), + '-dSourceDir=%s' % source_dir, + ] + + if defines: + args.extend('-d%s=%s' % define for define in sorted(defines.items())) + + subprocess.run(args, cwd=str(cwd), check=True) + + +def make_post_build_signing_fn(name, subject_name=None, cert_path=None, + cert_password=None, timestamp_url=None): + """Create a callable that will use signtool to sign hg.exe.""" + + def post_build_sign(source_dir, build_dir, dist_dir, version): + description = '%s %s' % (name, version) + + sign_with_signtool(dist_dir / 'hg.exe', description, + subject_name=subject_name, cert_path=cert_path, + cert_password=cert_password, + timestamp_url=timestamp_url) + + return post_build_sign + + +LIBRARIES_XML = ''' + + + + + + + + + + + + + + + +'''.lstrip() + + +def make_libraries_xml(wix_dir: pathlib.Path, dist_dir: pathlib.Path): + """Make XML data for library components WXS.""" + # We can't use ElementTree because it doesn't handle the + # directives. 
+ doc = xml.dom.minidom.parseString( + LIBRARIES_XML.format(wix_dir=str(wix_dir))) + + component = doc.getElementsByTagName('Component')[0] + + f = doc.createElement('File') + f.setAttribute('Name', 'library.zip') + f.setAttribute('KeyPath', 'yes') + component.appendChild(f) + + lib_dir = dist_dir / 'lib' + + for p in sorted(lib_dir.iterdir()): + if not p.name.endswith(('.dll', '.pyd')): + continue + + f = doc.createElement('File') + f.setAttribute('Name', p.name) + component.appendChild(f) + + return doc.toprettyxml() + + +def build_installer(source_dir: pathlib.Path, python_exe: pathlib.Path, + msi_name='mercurial', version=None, post_build_fn=None, + extra_packages_script=None, + extra_wxs:typing.Optional[typing.Dict[str,str]]=None, + extra_features:typing.Optional[typing.List[str]]=None): + """Build a WiX MSI installer. + + ``source_dir`` is the path to the Mercurial source tree to use. + ``arch`` is the target architecture. either ``x86`` or ``x64``. + ``python_exe`` is the path to the Python executable to use/bundle. + ``version`` is the Mercurial version string. If not defined, + ``mercurial/__version__.py`` will be consulted. + ``post_build_fn`` is a callable that will be called after building + Mercurial but before invoking WiX. It can be used to e.g. facilitate + signing. It is passed the paths to the Mercurial source, build, and + dist directories and the resolved Mercurial version. + ``extra_packages_script`` is a command to be run to inject extra packages + into the py2exe binary. It should stage packages into the virtualenv and + print a null byte followed by a newline-separated list of packages that + should be included in the exe. + ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}. + ``extra_features`` is a list of additional named Features to include in + the build. These must match Feature names in one of the wxs scripts. + """ + arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86' + + hg_build_dir = source_dir / 'build' + dist_dir = source_dir / 'dist' + wix_dir = source_dir / 'contrib' / 'packaging' / 'wix' + + requirements_txt = wix_dir / 'requirements.txt' + + build_py2exe(source_dir, hg_build_dir, + python_exe, 'wix', requirements_txt, + extra_packages=EXTRA_PACKAGES, + extra_packages_script=extra_packages_script) + + version = version or normalize_version(find_version(source_dir)) + print('using version string: %s' % version) + + if post_build_fn: + post_build_fn(source_dir, hg_build_dir, dist_dir, version) + + build_dir = hg_build_dir / ('wix-%s' % arch) + + build_dir.mkdir(exist_ok=True) + + wix_pkg, wix_entry = download_entry('wix', hg_build_dir) + wix_path = hg_build_dir / ('wix-%s' % wix_entry['version']) + + if not wix_path.exists(): + extract_zip_to_directory(wix_pkg, wix_path) + + ensure_vc90_merge_modules(hg_build_dir) + + source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir)) + + defines = {'Platform': arch} + + for wxs, rel_path in SUPPORT_WXS: + wxs = wix_dir / wxs + wxs_source_dir = source_dir / rel_path + run_candle(wix_path, build_dir, wxs, wxs_source_dir, defines=defines) + + for source, rel_path in sorted((extra_wxs or {}).items()): + run_candle(wix_path, build_dir, source, rel_path, defines=defines) + + # candle.exe doesn't like when we have an open handle on the file. + # So use TemporaryDirectory() instead of NamedTemporaryFile(). 
+ with tempfile.TemporaryDirectory() as td: + td = pathlib.Path(td) + + tf = td / 'library.wxs' + with tf.open('w') as fh: + fh.write(make_libraries_xml(wix_dir, dist_dir)) + + run_candle(wix_path, build_dir, tf, dist_dir, defines=defines) + + source = wix_dir / 'mercurial.wxs' + defines['Version'] = version + defines['Comments'] = 'Installs Mercurial version %s' % version + defines['VCRedistSrcDir'] = str(hg_build_dir) + if extra_features: + assert all(';' not in f for f in extra_features) + defines['MercurialExtraFeatures'] = ';'.join(extra_features) + + run_candle(wix_path, build_dir, source, source_build_rel, defines=defines) + + msi_path = source_dir / 'dist' / ( + '%s-%s-%s.msi' % (msi_name, version, arch)) + + args = [ + str(wix_path / 'light.exe'), + '-nologo', + '-ext', 'WixUIExtension', + '-sw1076', + '-spdb', + '-o', str(msi_path), + ] + + for source, rel_path in SUPPORT_WXS: + assert source.endswith('.wxs') + args.append(str(build_dir / ('%s.wixobj' % source[:-4]))) + + for source, rel_path in sorted((extra_wxs or {}).items()): + assert source.endswith('.wxs') + source = os.path.basename(source) + args.append(str(build_dir / ('%s.wixobj' % source[:-4]))) + + args.extend([ + str(build_dir / 'library.wixobj'), + str(build_dir / 'mercurial.wixobj'), + ]) + + subprocess.run(args, cwd=str(source_dir), check=True) + + print('%s created' % msi_path) + + return { + 'msi_path': msi_path, + } + + +def build_signed_installer(source_dir: pathlib.Path, python_exe: pathlib.Path, + name: str, version=None, subject_name=None, + cert_path=None, cert_password=None, + timestamp_url=None, extra_packages_script=None, + extra_wxs=None, extra_features=None): + """Build an installer with signed executables.""" + + post_build_fn = make_post_build_signing_fn( + name, + subject_name=subject_name, + cert_path=cert_path, + cert_password=cert_password, + timestamp_url=timestamp_url) + + info = build_installer(source_dir, python_exe=python_exe, + msi_name=name.lower(), version=version, + post_build_fn=post_build_fn, + extra_packages_script=extra_packages_script, + extra_wxs=extra_wxs, extra_features=extra_features) + + description = '%s %s' % (name, version) + + sign_with_signtool(info['msi_path'], description, + subject_name=subject_name, cert_path=cert_path, + cert_password=cert_password, timestamp_url=timestamp_url) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/inno/build.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/build.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +# build.py - Inno installer build script. +# +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# This script automates the building of the Inno MSI installer for Mercurial. + +# no-check-code because Python 3 native. 
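``build_signed_installer()`` above is the entry point used when code signing is requested; without signing, ``build_installer()`` can be called on its own. A hypothetical programmatic call — paths are placeholders and ``contrib/packaging`` must be importable — could look like::

   import pathlib
   import sys

   sys.path.insert(0, r'C:\src\hg\contrib\packaging')  # placeholder checkout path

   from hgpackaging.wix import build_installer

   info = build_installer(
       pathlib.Path(r'C:\src\hg'),                         # Mercurial source checkout
       python_exe=pathlib.Path(r'C:\python27\python.exe'),
       version=None,  # fall back to mercurial/__version__.py
   )
   print(info['msi_path'])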
+ +import argparse +import os +import pathlib +import sys + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + + parser.add_argument('--python', + required=True, + help='path to python.exe to use') + parser.add_argument('--iscc', + help='path to iscc.exe to use') + parser.add_argument('--version', + help='Mercurial version string to use ' + '(detected from __version__.py if not defined') + + args = parser.parse_args() + + if not os.path.isabs(args.python): + raise Exception('--python arg must be an absolute path') + + if args.iscc: + iscc = pathlib.Path(args.iscc) + else: + iscc = (pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Inno Setup 5' / + 'ISCC.exe') + + here = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) + source_dir = here.parent.parent.parent + build_dir = source_dir / 'build' + + sys.path.insert(0, str(source_dir / 'contrib' / 'packaging')) + + from hgpackaging.inno import build + + build(source_dir, build_dir, pathlib.Path(args.python), iscc, + version=args.version) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/inno/mercurial.iss --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/mercurial.iss Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,124 @@ +; Script generated by the Inno Setup Script Wizard. +; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES! + +#ifndef VERSION +#define FileHandle +#define FileLine +#define VERSION = "unknown" +#if FileHandle = FileOpen(SourcePath + "\..\..\..\mercurial\__version__.py") + #expr FileLine = FileRead(FileHandle) + #expr FileLine = FileRead(FileHandle) + #define VERSION = Copy(FileLine, Pos('"', FileLine)+1, Len(FileLine)-Pos('"', FileLine)-1) +#endif +#if FileHandle + #expr FileClose(FileHandle) +#endif +#pragma message "Detected Version: " + VERSION +#endif + +#ifndef ARCH +#define ARCH = "x86" +#endif + +[Setup] +AppCopyright=Copyright 2005-2019 Matt Mackall and others +AppName=Mercurial +AppVersion={#VERSION} +#if ARCH == "x64" +AppVerName=Mercurial {#VERSION} (64-bit) +OutputBaseFilename=Mercurial-{#VERSION}-x64 +ArchitecturesAllowed=x64 +ArchitecturesInstallIn64BitMode=x64 +#else +AppVerName=Mercurial {#VERSION} +OutputBaseFilename=Mercurial-{#VERSION} +#endif +InfoAfterFile=contrib/win32/postinstall.txt +LicenseFile=COPYING +ShowLanguageDialog=yes +AppPublisher=Matt Mackall and others +AppPublisherURL=https://mercurial-scm.org/ +AppSupportURL=https://mercurial-scm.org/ +AppUpdatesURL=https://mercurial-scm.org/ +AppID={{4B95A5F1-EF59-4B08-BED8-C891C46121B3} +AppContact=mercurial@mercurial-scm.org +DefaultDirName={pf}\Mercurial +SourceDir=..\..\.. 
+VersionInfoDescription=Mercurial distributed SCM (version {#VERSION}) +VersionInfoCopyright=Copyright 2005-2019 Matt Mackall and others +VersionInfoCompany=Matt Mackall and others +InternalCompressLevel=max +SolidCompression=true +SetupIconFile=contrib\win32\mercurial.ico +AllowNoIcons=true +DefaultGroupName=Mercurial +PrivilegesRequired=none +ChangesEnvironment=true + +[Files] +Source: contrib\mercurial.el; DestDir: {app}/Contrib +Source: contrib\vim\*.*; DestDir: {app}/Contrib/Vim +Source: contrib\zsh_completion; DestDir: {app}/Contrib +Source: contrib\bash_completion; DestDir: {app}/Contrib +Source: contrib\tcsh_completion; DestDir: {app}/Contrib +Source: contrib\tcsh_completion_build.sh; DestDir: {app}/Contrib +Source: contrib\hgk; DestDir: {app}/Contrib; DestName: hgk.tcl +Source: contrib\xml.rnc; DestDir: {app}/Contrib +Source: contrib\mercurial.el; DestDir: {app}/Contrib +Source: contrib\mq.el; DestDir: {app}/Contrib +Source: contrib\hgweb.fcgi; DestDir: {app}/Contrib +Source: contrib\hgweb.wsgi; DestDir: {app}/Contrib +Source: contrib\win32\ReadMe.html; DestDir: {app}; Flags: isreadme +Source: contrib\win32\postinstall.txt; DestDir: {app}; DestName: ReleaseNotes.txt +Source: dist\hg.exe; DestDir: {app}; AfterInstall: Touch('{app}\hg.exe.local') +Source: dist\lib\*.dll; Destdir: {app}\lib +Source: dist\lib\*.pyd; Destdir: {app}\lib +Source: dist\python*.dll; Destdir: {app}; Flags: skipifsourcedoesntexist +Source: dist\msvc*.dll; DestDir: {app}; Flags: skipifsourcedoesntexist +Source: dist\Microsoft.VC*.CRT.manifest; DestDir: {app}; Flags: skipifsourcedoesntexist +Source: dist\lib\library.zip; DestDir: {app}\lib +Source: doc\*.html; DestDir: {app}\Docs +Source: doc\style.css; DestDir: {app}\Docs +Source: mercurial\help\*.txt; DestDir: {app}\help +Source: mercurial\help\internals\*.txt; DestDir: {app}\help\internals +Source: mercurial\default.d\*.rc; DestDir: {app}\default.d +Source: mercurial\locale\*.*; DestDir: {app}\locale; Flags: recursesubdirs createallsubdirs skipifsourcedoesntexist +Source: mercurial\templates\*.*; DestDir: {app}\Templates; Flags: recursesubdirs createallsubdirs +Source: CONTRIBUTORS; DestDir: {app}; DestName: Contributors.txt +Source: COPYING; DestDir: {app}; DestName: Copying.txt + +[INI] +Filename: {app}\Mercurial.url; Section: InternetShortcut; Key: URL; String: https://mercurial-scm.org/ +Filename: {app}\default.d\editor.rc; Section: ui; Key: editor; String: notepad + +[UninstallDelete] +Type: files; Name: {app}\Mercurial.url +Type: filesandordirs; Name: {app}\default.d +Type: files; Name: "{app}\hg.exe.local" + +[Icons] +Name: {group}\Uninstall Mercurial; Filename: {uninstallexe} +Name: {group}\Mercurial Command Reference; Filename: {app}\Docs\hg.1.html +Name: {group}\Mercurial Configuration Files; Filename: {app}\Docs\hgrc.5.html +Name: {group}\Mercurial Ignore Files; Filename: {app}\Docs\hgignore.5.html +Name: {group}\Mercurial Web Site; Filename: {app}\Mercurial.url + +[Tasks] +Name: modifypath; Description: Add the installation path to the search path; Flags: unchecked + +[Code] +procedure Touch(fn: String); +begin + SaveStringToFile(ExpandConstant(fn), '', False); +end; + +const + ModPathName = 'modifypath'; + ModPathType = 'user'; + +function ModPathDir(): TArrayOfString; +begin + setArrayLength(Result, 1) + Result[0] := ExpandConstant('{app}'); +end; +#include "modpath.iss" diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/inno/modpath.iss --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/modpath.iss Wed Apr 17 
13:41:18 2019 -0400 @@ -0,0 +1,219 @@ +// ---------------------------------------------------------------------------- +// +// Inno Setup Ver: 5.4.2 +// Script Version: 1.4.2 +// Author: Jared Breland +// Homepage: http://www.legroom.net/software +// License: GNU Lesser General Public License (LGPL), version 3 +// http://www.gnu.org/licenses/lgpl.html +// +// Script Function: +// Allow modification of environmental path directly from Inno Setup installers +// +// Instructions: +// Copy modpath.iss to the same directory as your setup script +// +// Add this statement to your [Setup] section +// ChangesEnvironment=true +// +// Add this statement to your [Tasks] section +// You can change the Description or Flags +// You can change the Name, but it must match the ModPathName setting below +// Name: modifypath; Description: &Add application directory to your environmental path; Flags: unchecked +// +// Add the following to the end of your [Code] section +// ModPathName defines the name of the task defined above +// ModPathType defines whether the 'user' or 'system' path will be modified; +// this will default to user if anything other than system is set +// setArrayLength must specify the total number of dirs to be added +// Result[0] contains first directory, Result[1] contains second, etc. +// const +// ModPathName = 'modifypath'; +// ModPathType = 'user'; +// +// function ModPathDir(): TArrayOfString; +// begin +// setArrayLength(Result, 1); +// Result[0] := ExpandConstant('{app}'); +// end; +// #include "modpath.iss" +// ---------------------------------------------------------------------------- + +procedure ModPath(); +var + oldpath: String; + newpath: String; + updatepath: Boolean; + pathArr: TArrayOfString; + aExecFile: String; + aExecArr: TArrayOfString; + i, d: Integer; + pathdir: TArrayOfString; + regroot: Integer; + regpath: String; + +begin + // Get constants from main script and adjust behavior accordingly + // ModPathType MUST be 'system' or 'user'; force 'user' if invalid + if ModPathType = 'system' then begin + regroot := HKEY_LOCAL_MACHINE; + regpath := 'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'; + end else begin + regroot := HKEY_CURRENT_USER; + regpath := 'Environment'; + end; + + // Get array of new directories and act on each individually + pathdir := ModPathDir(); + for d := 0 to GetArrayLength(pathdir)-1 do begin + updatepath := true; + + // Modify WinNT path + if UsingWinNT() = true then begin + + // Get current path, split into an array + RegQueryStringValue(regroot, regpath, 'Path', oldpath); + oldpath := oldpath + ';'; + i := 0; + + while (Pos(';', oldpath) > 0) do begin + SetArrayLength(pathArr, i+1); + pathArr[i] := Copy(oldpath, 0, Pos(';', oldpath)-1); + oldpath := Copy(oldpath, Pos(';', oldpath)+1, Length(oldpath)); + i := i + 1; + + // Check if current directory matches app dir + if pathdir[d] = pathArr[i-1] then begin + // if uninstalling, remove dir from path + if IsUninstaller() = true then begin + continue; + // if installing, flag that dir already exists in path + end else begin + updatepath := false; + end; + end; + + // Add current directory to new path + if i = 1 then begin + newpath := pathArr[i-1]; + end else begin + newpath := newpath + ';' + pathArr[i-1]; + end; + end; + + // Append app dir to path if not already included + if (IsUninstaller() = false) AND (updatepath = true) then + newpath := newpath + ';' + pathdir[d]; + + // Write new path + RegWriteStringValue(regroot, regpath, 'Path', newpath); + + // Modify Win9x path + 
end else begin + + // Convert to shortened dirname + pathdir[d] := GetShortName(pathdir[d]); + + // If autoexec.bat exists, check if app dir already exists in path + aExecFile := 'C:\AUTOEXEC.BAT'; + if FileExists(aExecFile) then begin + LoadStringsFromFile(aExecFile, aExecArr); + for i := 0 to GetArrayLength(aExecArr)-1 do begin + if IsUninstaller() = false then begin + // If app dir already exists while installing, skip add + if (Pos(pathdir[d], aExecArr[i]) > 0) then + updatepath := false; + break; + end else begin + // If app dir exists and = what we originally set, then delete at uninstall + if aExecArr[i] = 'SET PATH=%PATH%;' + pathdir[d] then + aExecArr[i] := ''; + end; + end; + end; + + // If app dir not found, or autoexec.bat didn't exist, then (create and) append to current path + if (IsUninstaller() = false) AND (updatepath = true) then begin + SaveStringToFile(aExecFile, #13#10 + 'SET PATH=%PATH%;' + pathdir[d], True); + + // If uninstalling, write the full autoexec out + end else begin + SaveStringsToFile(aExecFile, aExecArr, False); + end; + end; + end; +end; + +// Split a string into an array using passed delimeter +procedure MPExplode(var Dest: TArrayOfString; Text: String; Separator: String); +var + i: Integer; +begin + i := 0; + repeat + SetArrayLength(Dest, i+1); + if Pos(Separator,Text) > 0 then begin + Dest[i] := Copy(Text, 1, Pos(Separator, Text)-1); + Text := Copy(Text, Pos(Separator,Text) + Length(Separator), Length(Text)); + i := i + 1; + end else begin + Dest[i] := Text; + Text := ''; + end; + until Length(Text)=0; +end; + + +procedure CurStepChanged(CurStep: TSetupStep); +var + taskname: String; +begin + taskname := ModPathName; + if CurStep = ssPostInstall then + if IsTaskSelected(taskname) then + ModPath(); +end; + +procedure CurUninstallStepChanged(CurUninstallStep: TUninstallStep); +var + aSelectedTasks: TArrayOfString; + i: Integer; + taskname: String; + regpath: String; + regstring: String; + appid: String; +begin + // only run during actual uninstall + if CurUninstallStep = usUninstall then begin + // get list of selected tasks saved in registry at install time + appid := '{#emit SetupSetting("AppId")}'; + if appid = '' then appid := '{#emit SetupSetting("AppName")}'; + regpath := ExpandConstant('Software\Microsoft\Windows\CurrentVersion\Uninstall\'+appid+'_is1'); + RegQueryStringValue(HKLM, regpath, 'Inno Setup: Selected Tasks', regstring); + if regstring = '' then RegQueryStringValue(HKCU, regpath, 'Inno Setup: Selected Tasks', regstring); + + // check each task; if matches modpath taskname, trigger patch removal + if regstring <> '' then begin + taskname := ModPathName; + MPExplode(aSelectedTasks, regstring, ','); + if GetArrayLength(aSelectedTasks) > 0 then begin + for i := 0 to GetArrayLength(aSelectedTasks)-1 do begin + if comparetext(aSelectedTasks[i], taskname) = 0 then + ModPath(); + end; + end; + end; + end; +end; + +function NeedRestart(): Boolean; +var + taskname: String; +begin + taskname := ModPathName; + if IsTaskSelected(taskname) and not UsingWinNT() then begin + Result := True; + end else begin + Result := False; + end; +end; diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/inno/readme.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/readme.rst Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,61 @@ +Requirements +============ + +Building the Inno installer requires a Windows machine. 
+
+The following system dependencies must be installed:
+
+* Python 2.7 (download from https://www.python.org/downloads/)
+* Microsoft Visual C++ Compiler for Python 2.7
+  (https://www.microsoft.com/en-us/download/details.aspx?id=44266)
+* Inno Setup (http://jrsoftware.org/isdl.php) version 5.4 or newer.
+  Be sure to install the optional Inno Setup Preprocessor feature,
+  which is required.
+* Python 3.5+ (to run the ``build.py`` script)
+
+Building
+========
+
+The ``build.py`` script automates the process of producing an
+Inno installer. It manages fetching and configuring the
+non-system dependencies (such as py2exe, gettext, and various
+Python packages).
+
+The script requires an activated ``Visual C++ 2008`` command prompt.
+A shortcut to such a prompt was installed with ``Microsoft Visual C++
+Compiler for Python 2.7``. From your Start Menu, look for
+``Microsoft Visual C++ Compiler Package for Python 2.7`` then launch
+either ``Visual C++ 2008 32-bit Command Prompt`` or
+``Visual C++ 2008 64-bit Command Prompt``.
+
+From the prompt, change to the Mercurial source directory, e.g.
+``cd c:\src\hg``.
+
+Next, invoke ``build.py`` to produce an Inno installer. You will
+need to supply the path to the Python interpreter to use.::
+
+   $ python3.exe contrib\packaging\inno\build.py \
+       --python c:\python27\python.exe
+
+.. note::
+
+   The script validates that the Visual C++ environment is
+   active and that the architecture of the specified Python
+   interpreter matches the Visual C++ environment, and it
+   errors out if they do not.
+
+If everything runs as intended, dependencies will be fetched and
+configured into the ``build`` sub-directory, Mercurial will be built,
+and an installer placed in the ``dist`` sub-directory. The final
+line of output should print the name of the generated installer.
+
+Additional options may be configured. Run ``build.py --help`` to
+see a list of program flags.
+
+MinGW
+=====
+
+It is theoretically possible to generate an installer that uses
+MinGW. This isn't well tested and ``build.py`` may not properly
+support it. See old versions of this file in version control for
+potentially useful hints as to how to achieve this.
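The architecture validation mentioned in the note above reduces to comparing the active Visual C++ environment against the interpreter's bitness. A standalone sketch of that check (mirroring, not reusing, the logic in ``hgpackaging.py2exe``)::

   import os
   import platform

   def check_arch_match():
       """Raise if the Visual C++ environment and this Python disagree on bitness."""
       vc_x64 = r'\x64' in os.environ.get('LIB', '')
       py_x64 = platform.architecture()[0] == '64bit'
       if vc_x64 != py_x64:
           raise Exception('architecture mismatch between Visual C++ and Python')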
diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/inno/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/requirements.txt Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes contrib/packaging/inno/requirements.txt.in -o contrib/packaging/inno/requirements.txt +# +certifi==2018.11.29 \ + --hash=sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7 \ + --hash=sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033 \ + # via dulwich +configparser==3.7.3 \ + --hash=sha256:27594cf4fc279f321974061ac69164aaebd2749af962ac8686b20503ac0bcf2d \ + --hash=sha256:9d51fe0a382f05b6b117c5e601fc219fede4a8c71703324af3f7d883aef476a3 \ + # via entrypoints +docutils==0.14 \ + --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \ + --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \ + --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 +dulwich==0.19.11 \ + --hash=sha256:afbe070f6899357e33f63f3f3696e601731fef66c64a489dea1bc9f539f4a725 +entrypoints==0.3 \ + --hash=sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19 \ + --hash=sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451 \ + # via keyring +keyring==18.0.0 \ + --hash=sha256:12833d2b05d2055e0e25931184af9cd6a738f320a2264853cabbd8a3a0f0b65d \ + --hash=sha256:ca33f5ccc542b9ffaa196ee9a33488069e5e7eac77d5b81969f8a3ce74d0230c +pygments==2.3.1 \ + --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \ + --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d +pywin32-ctypes==0.2.0 \ + --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \ + --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98 \ + # via keyring +urllib3==1.24.1 \ + --hash=sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39 \ + --hash=sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22 \ + # via dulwich diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/inno/requirements.txt.in --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/inno/requirements.txt.in Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,4 @@ +docutils +dulwich +keyring +pygments diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/COPYING.rtf Binary file contrib/packaging/wix/COPYING.rtf has changed diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/build.py --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/build.py Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 +# Copyright 2019 Gregory Szorc +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# no-check-code because Python 3 native. 
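The pinned, hash-locked requirements files above are generated rather than edited by hand. Assuming ``pip-tools`` is installed in the invoking Python, the documented ``pip-compile`` command can be re-run from a small helper such as this (an illustrative convenience only)::

   import subprocess

   def regenerate_requirements(src_in: str, dest_txt: str):
       """Re-run pip-compile with hash generation, per the file header."""
       subprocess.run(
           ['pip-compile', '--generate-hashes', src_in, '-o', dest_txt],
           check=True)

   regenerate_requirements('contrib/packaging/inno/requirements.txt.in',
                           'contrib/packaging/inno/requirements.txt')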
+ +"""Code to build Mercurial WiX installer.""" + +import argparse +import os +import pathlib +import sys + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + + parser.add_argument('--name', + help='Application name', + default='Mercurial') + parser.add_argument('--python', + help='Path to Python executable to use', + required=True) + parser.add_argument('--sign-sn', + help='Subject name (or fragment thereof) of certificate ' + 'to use for signing') + parser.add_argument('--sign-cert', + help='Path to certificate to use for signing') + parser.add_argument('--sign-password', + help='Password for signing certificate') + parser.add_argument('--sign-timestamp-url', + help='URL of timestamp server to use for signing') + parser.add_argument('--version', + help='Version string to use') + parser.add_argument('--extra-packages-script', + help=('Script to execute to include extra packages in ' + 'py2exe binary.')) + parser.add_argument('--extra-wxs', + help='CSV of path_to_wxs_file=working_dir_for_wxs_file') + parser.add_argument('--extra-features', + help=('CSV of extra feature names to include ' + 'in the installer from the extra wxs files')) + + args = parser.parse_args() + + here = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) + source_dir = here.parent.parent.parent + + sys.path.insert(0, str(source_dir / 'contrib' / 'packaging')) + + from hgpackaging.wix import ( + build_installer, + build_signed_installer, + ) + + fn = build_installer + kwargs = { + 'source_dir': source_dir, + 'python_exe': pathlib.Path(args.python), + 'version': args.version, + } + + if not os.path.isabs(args.python): + raise Exception('--python arg must be an absolute path') + + if args.extra_packages_script: + kwargs['extra_packages_script'] = args.extra_packages_script + if args.extra_wxs: + kwargs['extra_wxs'] = dict( + thing.split("=") for thing in args.extra_wxs.split(',')) + if args.extra_features: + kwargs['extra_features'] = args.extra_features.split(',') + + if args.sign_sn or args.sign_cert: + fn = build_signed_installer + kwargs['name'] = args.name + kwargs['subject_name'] = args.sign_sn + kwargs['cert_path'] = args.sign_cert + kwargs['cert_password'] = args.sign_password + kwargs['timestamp_url'] = args.sign_timestamp_url + + fn(**kwargs) diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/contrib.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/contrib.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/defines.wxi --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/defines.wxi Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,9 @@ + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/dist.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/dist.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/doc.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/doc.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,50 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/guids.wxi --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/guids.wxi Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,52 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/help.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/help.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/i18n.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/i18n.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/locale.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/locale.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/mercurial.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/mercurial.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,167 @@ + + + + + + + + + + + + + + + + + + + + + + + VersionNT >= 501 + + + + + + + + mercurial@mercurial-scm.org + https://mercurial-scm.org/wiki/ + https://mercurial-scm.org/about/ + https://mercurial-scm.org/downloads/ + https://mercurial-scm.org/wiki/Support + hgIcon.ico + + + amus + + + 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/readme.rst --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/readme.rst Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,71 @@ +WiX Installer +============= + +The files in this directory are used to produce an MSI installer using +the WiX Toolset (http://wixtoolset.org/). + +The MSI installers require elevated (admin) privileges due to the +installation of MSVC CRT libraries into the Windows system store. See +the Inno Setup installers in the ``inno`` sibling directory for installers +that do not have this requirement. + +Requirements +============ + +Building the WiX installers requires a Windows machine. The following +dependencies must be installed: + +* Python 2.7 (download from https://www.python.org/downloads/) +* Microsoft Visual C++ Compiler for Python 2.7 + (https://www.microsoft.com/en-us/download/details.aspx?id=44266) +* Python 3.5+ (to run the ``build.py`` script) + +Building +======== + +The ``build.py`` script automates the process of producing an MSI +installer. It manages fetching and configuring non-system dependencies +(such as py2exe, gettext, and various Python packages). + +The script requires an activated ``Visual C++ 2008`` command prompt. +A shortcut to such a prompt was installed with ``Microsoft Visual +C++ Compiler for Python 2.7``. From your Start Menu, look for +``Microsoft Visual C++ Compiler Package for Python 2.7`` then +launch either ``Visual C++ 2008 32-bit Command Prompt`` or +``Visual C++ 2008 64-bit Command Prompt``. + +From the prompt, change to the Mercurial source directory. e.g. +``cd c:\src\hg``. + +Next, invoke ``build.py`` to produce an MSI installer. You will need +to supply the path to the Python interpreter to use.:: + + $ python3 contrib\packaging\wix\build.py \ + --python c:\python27\python.exe + +.. 
note:: + + The script validates that the Visual C++ environment is active and + that the architecture of the specified Python interpreter matches the + Visual C++ environment. An error is raised otherwise. + +If everything runs as intended, dependencies will be fetched and +configured into the ``build`` sub-directory, Mercurial will be built, +and an installer placed in the ``dist`` sub-directory. The final line +of output should print the name of the generated installer. + +Additional options may be configured. Run ``build.py --help`` to see +a list of program flags. + +Relationship to TortoiseHG +========================== + +TortoiseHG uses the WiX files in this directory. + +The code for building TortoiseHG installers lives at +https://bitbucket.org/tortoisehg/thg-winbuild and is maintained by +Steve Borho (steve@borho.org). + +When changing behavior of the WiX installer, be sure to notify +the TortoiseHG Project of the changes so they have ample time +provide feedback and react to those changes. diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/requirements.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/requirements.txt Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,13 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes contrib/packaging/wix/requirements.txt.in -o contrib/packaging/wix/requirements.txt +# +docutils==0.14 \ + --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \ + --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \ + --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 +pygments==2.3.1 \ + --hash=sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a \ + --hash=sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/requirements.txt.in --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/requirements.txt.in Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,2 @@ +docutils +pygments diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/packaging/wix/templates.wxs --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/packaging/wix/templates.wxs Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,251 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/perf-utils/discovery-helper.sh --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/contrib/perf-utils/discovery-helper.sh Wed Apr 17 13:41:18 2019 -0400 @@ -0,0 +1,107 @@ +#!/bin/bash +# +# produces two repositories with different common and missing subsets +# +# $ discovery-helper.sh REPO NBHEADS DEPT +# +# The Goal is to produce two repositories with some common part and some +# exclusive part on each side. Provide a source repository REPO, it will +# produce two repositories REPO-left and REPO-right. +# +# Each repository will be missing some revisions exclusive to NBHEADS of the +# repo topological heads. 
These heads and revisions exclusive to them (up to +# DEPTH depth) are stripped. +# +# The "left" repository will use the NBHEADS first heads (sorted by +# description). The "right" use the last NBHEADS one. +# +# To find out how many topological heads a repo has, use: +# +# $ hg heads -t -T '{rev}\n' | wc -l +# +# Example: +# +# The `pypy-2018-09-01` repository has 192 heads. To produce two repositories +# with 92 common heads and ~50 exclusive heads on each side. +# +# $ ./discovery-helper.sh pypy-2018-08-01 50 10 + +set -euo pipefail + +printusage () { + echo "usage: `basename $0` REPO NBHEADS DEPTH [left|right]" >&2 +} + +if [ $# -lt 3 ]; then + printusage + exit 64 +fi + +repo="$1" +shift + +nbheads="$1" +shift + +depth="$1" +shift + +doleft=1 +doright=1 +if [ $# -gt 1 ]; then + printusage + exit 64 +elif [ $# -eq 1 ]; then + if [ "$1" == "left" ]; then + doleft=1 + doright=0 + elif [ "$1" == "right" ]; then + doleft=0 + doright=1 + else + printusage + exit 64 + fi +fi + +leftrepo="${repo}-${nbheads}h-${depth}d-left" +rightrepo="${repo}-${nbheads}h-${depth}d-right" + +left="first(sort(heads(all()), 'desc'), $nbheads)" +right="last(sort(heads(all()), 'desc'), $nbheads)" + +leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))" +rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))" + +echo '### creating left/right repositories with missing changesets:' +if [ $doleft -eq 1 ]; then + echo '# left revset:' '"'${leftsubset}'"' +fi +if [ $doright -eq 1 ]; then + echo '# right revset:' '"'${rightsubset}'"' +fi + +buildone() { + side="$1" + dest="$2" + revset="$3" + echo "### building $side repository: $dest" + if [ -e "$dest" ]; then + echo "destination repo already exists: $dest" >&2 + exit 1 + fi + echo '# cloning' + if ! cp --recursive --reflink=always ${repo} ${dest}; then + hg clone --noupdate "${repo}" "${dest}" + fi + echo '# stripping' '"'${revset}'"' + hg -R "${dest}" --config extensions.strip= strip --rev "$revset" --no-backup +} + +if [ $doleft -eq 1 ]; then + buildone left "$leftrepo" "$leftsubset" +fi + +if [ $doright -eq 1 ]; then + buildone right "$rightrepo" "$rightsubset" +fi diff -r d1c33b2442a7 -r 4a8d9ed86475 contrib/perf.py --- a/contrib/perf.py Tue Mar 19 09:23:35 2019 -0400 +++ b/contrib/perf.py Wed Apr 17 13:41:18 2019 -0400 @@ -1,5 +1,34 @@ # perf.py - performance test routines -'''helper extension to measure performance''' +'''helper extension to measure performance + +Configurations +============== + +``perf`` +-------- + +``all-timing`` + When set, additional statistics will be reported for each benchmark: best, + worst, median average. If not set only the best timing is reported + (default: off). + +``presleep`` + number of second to wait before any group of runs (default: 1) + +``run-limits`` + Control the number of runs each benchmark will perform. The option value + should be a list of `